diff --git a/Alignment/HIPAlignmentAlgorithm/python/OptionParser/HipPyOptionParser.py b/Alignment/HIPAlignmentAlgorithm/python/OptionParser/HipPyOptionParser.py index f06d7a30a2394..d2d0dc463ce62 100644 --- a/Alignment/HIPAlignmentAlgorithm/python/OptionParser/HipPyOptionParser.py +++ b/Alignment/HIPAlignmentAlgorithm/python/OptionParser/HipPyOptionParser.py @@ -4,7 +4,6 @@ from copy import deepcopy import FWCore.ParameterSet.Config as cms import FWCore.PythonUtilities.LumiList as LumiList -import six # Helper functions def getPSetDict(thePSet): @@ -14,7 +13,7 @@ def insertValToPSet(name,val,thePSet): setattr(thePSet,name,val) def insertPSetToPSet(inPSet, outPSet): - for key,val in getPSetDict(six.iteritems(inPSet)): + for key,val in getPSetDict(inPSet.items()): insertValToPSet(key,val,outPSet) def insertPSetToVPSet(inPSet, outVPSet): @@ -112,7 +111,7 @@ def parseOptions(self): def interpretOptions(self): gttogetpsets=[] - for key,val in six.iteritems(self.optdict): + for key,val in self.optdict.items(): # Get GT name if key=="gt": autofind=val.find("auto") diff --git a/Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py b/Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py index 213d1c87ad14c..8f11f81db1ccd 100644 --- a/Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py +++ b/Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py @@ -10,7 +10,6 @@ import CondCore.Utilities.conddblib as conddb from functools import reduce -import six def create_single_iov_db(inputs, run_number, output_db): """Create an sqlite file with single-IOV tags for alignment payloads. @@ -22,7 +21,7 @@ def create_single_iov_db(inputs, run_number, output_db): """ # find the IOV containing `run_number` - for record,tag in six.iteritems(inputs): + for record,tag in inputs.items(): run_is_covered = False for iov in reversed(tag["iovs"]): if iov <= run_number: @@ -40,7 +39,7 @@ def create_single_iov_db(inputs, run_number, output_db): result = {} remove_existing_object(output_db) - for record,tag in six.iteritems(inputs): + for record,tag in inputs.items(): result[record] = {"connect": "sqlite_file:"+output_db, "tag": "_".join([tag["tag"], tag["since"]])} diff --git a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py index d7aafc93d6c93..6e213ee9d73ac 100755 --- a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py +++ b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py @@ -20,7 +20,6 @@ from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS from functools import reduce -import six ################################################################################ def main(argv = None): @@ -262,7 +261,7 @@ def _create_mille_jobs(self): json_regex = re.compile('setupJson\s*\=\s*.*$', re.M) first_dataset = True - for name, dataset in six.iteritems(self._datasets): + for name, dataset in self._datasets.items(): print("="*75) # Build config from template/Fill in variables try: @@ -453,7 +452,7 @@ def _create_additional_pede_jobs(self): print("Properly set up the alignment before using the -w option.") sys.exit(1) - firstDataset = next(six.itervalues(self._datasets)) + firstDataset = next(iter(self._datasets.values())) config_template = firstDataset["configTemplate"] collection = firstDataset["collection"] @@ -517,7 +516,7 @@ def _create_input_db(self): run_number, input_db_name) self._override_gt = "" - for record,tag in six.iteritems(tags): + for record,tag in 
tags.items(): if self._override_gt == "": self._override_gt \ += ("\nimport " @@ -572,13 +571,13 @@ def _check_iov_definition(self): print(self._first_run, "!=", iovs[0]) sys.exit(1) - for inp in six.itervalues(inputs): + for inp in inputs.values(): inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"]) # check consistency of input with output problematic_gt_inputs = {} input_indices = {key: len(value["iovs"]) -1 - for key,value in six.iteritems(inputs)} + for key,value in inputs.items()} for iov in reversed(iovs): for inp in inputs: if inputs[inp].pop("problematic", False): @@ -618,7 +617,7 @@ def _check_iov_definition(self): # check consistency of 'TrackerAlignmentRcd' with other inputs input_indices = {key: len(value["iovs"]) -1 - for key,value in six.iteritems(inputs) + for key,value in inputs.items() if (key != "TrackerAlignmentRcd") and (inp not in problematic_gt_inputs)} for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]): @@ -680,7 +679,7 @@ def _fetch_defaults(self): if var == "testMode": continue print("No '" + var + "' given in [general] section.") - for dataset in six.itervalues(self._external_datasets): + for dataset in self._external_datasets.values(): dataset["general"] = {} for var in ("globaltag", "configTemplate", "json"): try: @@ -715,7 +714,7 @@ def _fetch_datasets(self): "weight": None} all_configs.update(self._external_datasets) - for config in six.itervalues(all_configs): + for config in all_configs.values(): global_weight = "1" if config["weight"] is None else config["weight"] if global_weight+self._config.config_path in self._common_weights: global_weight = self._common_weights[global_weight+ @@ -865,8 +864,8 @@ def _fetch_datasets(self): print("inputfilelist as the number of jobs.") # check if local weights override global weights and resolve name clashes - for weight_name, weight_values in six.iteritems(common_weights): - for key, weight in six.iteritems(weight_dict): + for weight_name, weight_values in common_weights.items(): + for key, weight in weight_dict.items(): if any([weight_name in w for w in weight]): self._common_weights[weight_name+config["config"].config_path] = weight_values self._weight_dict[key] = [mps_tools.replace_factors(w, diff --git a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_list_evts.py b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_list_evts.py index d792f582ea448..6e5c43efd2925 100755 --- a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_list_evts.py +++ b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_list_evts.py @@ -18,7 +18,6 @@ import sys -import six mps_db = "mps.db" # the mps.db file, default value @@ -66,7 +65,7 @@ def get_num_evts_per_merged_dataset(merged_datasets,num_evts_per_dataset): `merge_datasets' for an explanation of . """ num_evts_per_merged_dataset = {} - for merged_dataset,datasets in six.iteritems(merged_datasets): + for merged_dataset,datasets in merged_datasets.items(): num_evts = 0 for dataset in datasets: num_evts = num_evts + num_evts_per_dataset[dataset] @@ -106,7 +105,7 @@ def print_merging_scheme(merged_datasets): of what is meant by merged dataset. 
""" print("Defining the following merged datasets:") - for merged_dataset,datasets in six.iteritems(merged_datasets): + for merged_dataset,datasets in merged_datasets.items(): print("\n `"+merged_dataset+"' from:") for dataset in datasets: print(" `"+dataset+"'") diff --git a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_prepare_input_db.py b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_prepare_input_db.py index 9783a23b11205..aea379e36d0e9 100755 --- a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_prepare_input_db.py +++ b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_prepare_input_db.py @@ -3,7 +3,6 @@ import sys import argparse import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools -import six ################################################################################ def main(argv = None): @@ -33,7 +32,7 @@ def main(argv = None): ["TrackerAlignmentRcd", "TrackerSurfaceDeformationRcd", "TrackerAlignmentErrorExtendedRcd"]) - for inp in six.itervalues(inputs): + for inp in inputs.values(): inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"]) mps_tools.create_single_iov_db(inputs, args.run_number, args.output_db) diff --git a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py index 957f49317ef29..7b10f74629082 100755 --- a/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py +++ b/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py @@ -6,7 +6,6 @@ import subprocess import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib -import six def fill_time_info(mps_index, status, cpu_time): """Fill timing info in the database for `mps_index`. @@ -70,7 +69,7 @@ def fill_time_info(mps_index, status, cpu_time): job_status[job_id] = {"status": htcondor_jobstatus[status], "cpu": float(cpu_time)} - for job_id, job_info in six.iteritems(job_status): + for job_id, job_info in job_status.items(): mps_index = submitted_jobs.get(job_id, -1) # check for disabled Jobs disabled = "DISABLED" if "DISABLED" in lib.JOBSTATUS[mps_index] else "" @@ -118,7 +117,7 @@ def fill_time_info(mps_index, status, cpu_time): ################################################################################ # check for orphaned jobs -for job_id, mps_index in six.iteritems(submitted_jobs): +for job_id, mps_index in submitted_jobs.items(): for status in ("SETUP", "DONE", "FETCH", "TIMEL", "SUBTD"): if status in lib.JOBSTATUS[mps_index]: print("Funny entry index", mps_index, " job", lib.JOBID[mps_index], end=' ') diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py index 0539bb4dc2a94..8520c1024a123 100644 --- a/Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py @@ -6,7 +6,6 @@ from . 
import configTemplates from .helperFunctions import parsecolor, parsestyle, replaceByMap, clean_name, getTagsMap from .TkAlExceptions import AllInOneError -import six class Alignment(object): condShorts = { @@ -246,7 +245,7 @@ def __getConditions( self, theConfig, theSection ): rcdnames = collections.Counter(condition["rcdName"] for condition in conditions) if rcdnames and max(rcdnames.values()) >= 2: raise AllInOneError("Some conditions are specified multiple times (possibly through mp or hp options)!\n" - + ", ".join(rcdname for rcdname, count in six.iteritems(rcdnames) if count >= 2)) + + ", ".join(rcdname for rcdname, count in rcdnames.items() if count >= 2)) for condition in conditions: self.__testDbExist(condition["connectString"], condition["tagName"]) diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py index 7915e970b7b60..1049eb68d0270 100644 --- a/Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py @@ -10,7 +10,6 @@ from .dataset import Dataset from .helperFunctions import replaceByMap, addIndex, getCommandOutput2, boolfromstring, pythonboolstring from .TkAlExceptions import AllInOneError -from six import with_metaclass class ValidationMetaClass(ABCMeta): sets = ["mandatories", "optionals", "needpackages"] @@ -39,7 +38,7 @@ def __new__(cls, clsname, bases, dct): return super(ValidationMetaClass, cls).__new__(cls, clsname, bases, dct) -class GenericValidation(with_metaclass(ValidationMetaClass,object)): +class GenericValidation(object, metaclass=ValidationMetaClass): defaultReferenceName = "DEFAULT" mandatories = set() defaults = { diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py index 1953c0d4c76c5..459894b721337 100644 --- a/Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py @@ -7,7 +7,6 @@ import sys from .TkAlExceptions import AllInOneError import CondCore.Utilities.conddblib as conddblib -import six ####################--- Helpers ---############################ def replaceByMap(target, the_map): @@ -28,7 +27,7 @@ def replaceByMap(target, the_map): result = result.replace(".oO["+key+"]Oo.",the_map[key]) except TypeError: #try a dict try: - for keykey, value in six.iteritems(the_map[key]): + for keykey, value in the_map[key].items(): result = result.replace(".oO[" + key + "['" + keykey + "']]Oo.", value) result = result.replace(".oO[" + key + '["' + keykey + '"]]Oo.', value) except AttributeError: #try a list @@ -157,12 +156,12 @@ def cache(function): cache = {} def newfunction(*args, **kwargs): try: - return cache[args, tuple(sorted(six.iteritems(kwargs)))] + return cache[args, tuple(sorted(kwargs.items()))] except TypeError: - print(args, tuple(sorted(six.iteritems(kwargs)))) + print(args, tuple(sorted(kwargs.items()))) raise except KeyError: - cache[args, tuple(sorted(six.iteritems(kwargs)))] = function(*args, **kwargs) + cache[args, tuple(sorted(kwargs.items()))] = function(*args, **kwargs) return newfunction(*args, **kwargs) newfunction.__name__ = function.__name__ return newfunction diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/plottingOptions.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/plottingOptions.py index 
c3610d8e8d3f9..258734faf4244 100644 --- a/Alignment/OfflineValidation/python/TkAlAllInOneTool/plottingOptions.py +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/plottingOptions.py @@ -15,9 +15,8 @@ from .trackSplittingValidation import TrackSplittingValidation from .zMuMuValidation import ZMuMuValidation from .overlapValidation import OverlapValidation -from six import with_metaclass -class BasePlottingOptions(with_metaclass(ValidationMetaClass,object)): +class BasePlottingOptions(object, metaclass=ValidationMetaClass): defaults = { "cmssw" : os.environ["CMSSW_BASE"], "publicationstatus" : "", diff --git a/Alignment/OfflineValidation/scripts/validateAlignments.py b/Alignment/OfflineValidation/scripts/validateAlignments.py index d667fef3fb516..dbcc0756efa65 100755 --- a/Alignment/OfflineValidation/scripts/validateAlignments.py +++ b/Alignment/OfflineValidation/scripts/validateAlignments.py @@ -17,7 +17,6 @@ import pprint import re -import six import Alignment.OfflineValidation.TkAlAllInOneTool.configTemplates \ as configTemplates from Alignment.OfflineValidation.TkAlAllInOneTool.TkAlExceptions \ @@ -438,7 +437,7 @@ def runCondorJobs(outdir): with open("{}/validation.dagman".format(outdir), "w") as dagman: parents = {} - for (valType, valName, iov), alignments in six.iteritems(ValidationJob.condorConf): + for (valType, valName, iov), alignments in ValidationJob.condorConf.items(): parents[(valType, valName, iov)] = [] for jobInfo in alignments: @@ -456,7 +455,7 @@ def runCondorJobs(outdir): else: raise AllInOneError("Merge script '[%s]' not found!"%path) - for (valType, valName, iov), alignments in six.iteritems(ValidationJob.condorConf): + for (valType, valName, iov), alignments in ValidationJob.condorConf.items(): if len(parents[(valType, valName, iov)]) != 0: dagman.write('PARENT {} '.format(" ".join([parent for parent in parents[(valType, valName, iov)]])) + 'CHILD Merge_{}_{}_{}'.format(valType, valName, iov) + "\n") @@ -536,7 +535,7 @@ def createMergeScript( path, validations, options ): #pprint.pprint(comparisonLists) anythingToMerge = [] - for (validationtype, validationName, referenceName), validations in six.iteritems(comparisonLists): + for (validationtype, validationName, referenceName), validations in comparisonLists.items(): #pprint.pprint("validations") #pprint.pprint(validations) globalDictionaries.plottingOptions = {} diff --git a/CalibTracker/SiStripDCS/test/ManualO2OForRestart.py b/CalibTracker/SiStripDCS/test/ManualO2OForRestart.py index 2b4f0c2d4718b..d22e8ee443f6d 100755 --- a/CalibTracker/SiStripDCS/test/ManualO2OForRestart.py +++ b/CalibTracker/SiStripDCS/test/ManualO2OForRestart.py @@ -12,14 +12,13 @@ import datetime import subprocess import argparse -import six def insert_to_file(template, target, replace_dict): '''Update the template file based on the replace_dict, and write to the target.''' with open(template, 'r') as input_file: config=input_file.read() with open(target, 'w') as output_file: - for key, value in six.iteritems(replace_dict): + for key, value in replace_dict.items(): config = config.replace(key, value) output_file.write(config) diff --git a/CalibTracker/SiStripESProducers/test/python/SiStripBadAPVListBuilder_byHand_cfg.py b/CalibTracker/SiStripESProducers/test/python/SiStripBadAPVListBuilder_byHand_cfg.py index 3667cd05dfb4b..2eec6be9b56d6 100644 --- a/CalibTracker/SiStripESProducers/test/python/SiStripBadAPVListBuilder_byHand_cfg.py +++ b/CalibTracker/SiStripESProducers/test/python/SiStripBadAPVListBuilder_byHand_cfg.py @@ -1,5 +1,4 @@ 
import FWCore.ParameterSet.Config as cms -import six process = cms.Process("CALIB") #################################################### @@ -56,7 +55,7 @@ def getFileInPath(rfile): #print(detDict) APVsToKill = [] -for det,napv in six.iteritems(detDict): +for det,napv in detDict.items(): APVsToKill.append( cms.PSet( DetId = cms.uint32(int(det)), diff --git a/CommonTools/ParticleFlow/python/Isolation/customiseEarlyDeleteForCandIsoDeposits.py b/CommonTools/ParticleFlow/python/Isolation/customiseEarlyDeleteForCandIsoDeposits.py index c449117d88def..1be9772d2e824 100644 --- a/CommonTools/ParticleFlow/python/Isolation/customiseEarlyDeleteForCandIsoDeposits.py +++ b/CommonTools/ParticleFlow/python/Isolation/customiseEarlyDeleteForCandIsoDeposits.py @@ -1,7 +1,6 @@ import FWCore.ParameterSet.Config as cms import collections -import six def customiseEarlyDeleteForCandIsoDeposits(process, products): # Find the producers @@ -9,7 +8,7 @@ def customiseEarlyDeleteForCandIsoDeposits(process, products): def _branchName(productType, moduleLabel, instanceLabel=""): return "%s_%s_%s_%s" % (productType, moduleLabel, instanceLabel, process.name_()) - for name, module in six.iteritems(process.producers_()): + for name, module in process.producers_().items(): cppType = module._TypedParameterizable__type if cppType == "CandIsoDepositProducer": if module.ExtractorPSet.ComponentName in ["CandViewExtractor", "PFCandWithSuperClusterExtractor"] : diff --git a/CondCore/ESSources/test/python/load_record_empty_source_cfg.py b/CondCore/ESSources/test/python/load_record_empty_source_cfg.py index 231eda1f55008..a8e015d53d8fa 100644 --- a/CondCore/ESSources/test/python/load_record_empty_source_cfg.py +++ b/CondCore/ESSources/test/python/load_record_empty_source_cfg.py @@ -3,7 +3,6 @@ import FWCore.ParameterSet.Config as cms import FWCore.ParameterSet.VarParsing as VarParsing -import six options = VarParsing.VarParsing() options.register('connectionString', @@ -161,7 +160,7 @@ if process.schedule_() is not None: process.schedule_().append( process.esout ) -for name, module in six.iteritems(process.es_sources_()): +for name, module in process.es_sources_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) -for name, module in six.iteritems(process.es_producers_()): +for name, module in process.es_producers_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) diff --git a/CondCore/ESSources/test/python/loadall_from_gt_empty_source_cfg.py b/CondCore/ESSources/test/python/loadall_from_gt_empty_source_cfg.py index 15a7569597fd4..817fa168fdafe 100644 --- a/CondCore/ESSources/test/python/loadall_from_gt_empty_source_cfg.py +++ b/CondCore/ESSources/test/python/loadall_from_gt_empty_source_cfg.py @@ -4,7 +4,6 @@ import FWCore.ParameterSet.Config as cms import FWCore.ParameterSet.VarParsing as VarParsing from Configuration.AlCa.autoCond import autoCond -import six options = VarParsing.VarParsing() options.register('connectionString', @@ -157,7 +156,7 @@ if process.schedule_() is not None: process.schedule_().append( process.esout ) -for name, module in six.iteritems(process.es_sources_()): +for name, module in process.es_sources_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) -for name, module in six.iteritems(process.es_producers_()): +for name, module in process.es_producers_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) diff --git a/CondCore/ESSources/test/python/loadall_from_one_record_empty_source_cfg.py 
b/CondCore/ESSources/test/python/loadall_from_one_record_empty_source_cfg.py index 919607a996819..caac3c129ad02 100644 --- a/CondCore/ESSources/test/python/loadall_from_one_record_empty_source_cfg.py +++ b/CondCore/ESSources/test/python/loadall_from_one_record_empty_source_cfg.py @@ -4,7 +4,6 @@ import FWCore.ParameterSet.Config as cms import FWCore.ParameterSet.VarParsing as VarParsing from Configuration.AlCa.autoCond import autoCond -import six options = VarParsing.VarParsing() options.register('processId', @@ -168,7 +167,7 @@ #if process.schedule_() is not None: # process.schedule_().append( process.esout ) -for name, module in six.iteritems(process.es_sources_()): +for name, module in process.es_sources_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) -for name, module in six.iteritems(process.es_producers_()): +for name, module in process.es_producers_().items(): print("ESModules> provider:%s '%s'" % ( name, module.type_() )) diff --git a/CondTools/BTau/python/checkBTagCalibrationConsistency.py b/CondTools/BTau/python/checkBTagCalibrationConsistency.py index cc112df33a634..5db9ddb02a527 100755 --- a/CondTools/BTau/python/checkBTagCalibrationConsistency.py +++ b/CondTools/BTau/python/checkBTagCalibrationConsistency.py @@ -8,7 +8,6 @@ from . import dataLoader import ROOT -import six data = None check_flavor = True @@ -139,7 +138,7 @@ def _check_sys_side(self, op, flav): assert len(sys_dict) == len(entries) sys_cent = sys_dict.pop('central', None) x = discr if op == 3 else pt - for syst, e in six.iteritems(sys_dict): + for syst, e in sys_dict.items(): sys_val = e.tf1_func.Eval(x) cent_val = sys_cent.tf1_func.Eval(x) if syst.startswith('up') and not sys_val > cent_val: diff --git a/CondTools/BTau/python/combineBTagCalibrationData.py b/CondTools/BTau/python/combineBTagCalibrationData.py index 7e88dd9cf679a..a581c357b3ac0 100755 --- a/CondTools/BTau/python/combineBTagCalibrationData.py +++ b/CondTools/BTau/python/combineBTagCalibrationData.py @@ -7,7 +7,6 @@ import itertools from . import checkBTagCalibrationConsistency as checker -import six def check_csv_data(csv_data): res = checker.run_check_csv(csv_data, False, False, False) @@ -41,7 +40,7 @@ def main(): print('\n' + '='*80) print('Checking consistency of individual input files...') print('='*80) - for fname, csv_data in six.iteritems(all_csv_data): + for fname, csv_data in all_csv_data.items(): print('\nChecking file:', fname) print('='*80) check_csv_data(csv_data) @@ -49,7 +48,7 @@ def main(): print('\n' + '='*80) print('Checking consistency of combinations...') print('='*80) - for one, two in itertools.combinations(six.iteritems(all_csv_data), 2): + for one, two in itertools.combinations(all_csv_data.items(), 2): print('\nChecking combination:', one[0], two[0]) print('='*80) check_csv_data(one[1] + two[1]) @@ -58,7 +57,7 @@ def main(): print('='*80) with open(sys.argv[-1], 'w') as f: f.write(header) - for csv_data in six.itervalues(all_csv_data): + for csv_data in all_csv_data.values(): f.write('\n') f.writelines(csv_data) diff --git a/CondTools/BTau/python/generateFlavCfromFlavB.py b/CondTools/BTau/python/generateFlavCfromFlavB.py index 42ea97b3a1ce5..665a500a48e50 100755 --- a/CondTools/BTau/python/generateFlavCfromFlavB.py +++ b/CondTools/BTau/python/generateFlavCfromFlavB.py @@ -8,7 +8,6 @@ from . import dataLoader from . 
import checkBTagCalibrationConsistency as checker -import six def generate_flav_c(loaded_data): flav_b_data = [e for e in loaded_data.entries if e.params.jetFlavor == 0] @@ -41,7 +40,7 @@ def gen_flavb_csv_line(dicts): central = d.pop('central') central.params.jetFlavor = 1 yield central.makeCSVLine() - for e in six.itervalues(d): + for e in d.values(): e.params.jetFlavor = 1 e.formula = '2*(%s)-(%s)' % (e.formula, central.formula) yield e.makeCSVLine() diff --git a/CondTools/SiStrip/python/o2o_helper.py b/CondTools/SiStrip/python/o2o_helper.py index d3e7a232628ba..dc0672fc7d270 100644 --- a/CondTools/SiStrip/python/o2o_helper.py +++ b/CondTools/SiStrip/python/o2o_helper.py @@ -11,7 +11,6 @@ from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText import sqlite3 -import six def kill_subproc_noexcept(p): '''Kill a subprocess without throwing OSError. @@ -48,7 +47,7 @@ def insert_to_file(template, target, replace_dict): with open(template, 'r') as input_file: config=input_file.read() with open(target, 'w') as output_file: - for key, value in six.iteritems(replace_dict): + for key, value in replace_dict.items(): config = config.replace(key, value) output_file.write(config) return config diff --git a/Configuration/AlCa/python/GlobalTag.py b/Configuration/AlCa/python/GlobalTag.py index bcc2347b7d9f1..1910449e29fff 100644 --- a/Configuration/AlCa/python/GlobalTag.py +++ b/Configuration/AlCa/python/GlobalTag.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms import sys -import six def checkPrefix(mainList, inputGTParams): """ Compares two input GTs to see if they have the same prefix. Returns the index in the internal list of GTs of the match @@ -131,7 +130,7 @@ def GlobalTag(essource = None, globaltag = None, conditions = None): # explicit payloads toGet from DB if custom_conditions: - for ( (record, label), (tag, connection, snapshotTime) ) in sorted(six.iteritems(custom_conditions)): + for ( (record, label), (tag, connection, snapshotTime) ) in sorted(custom_conditions.items()): payload = cms.PSet() payload.record = cms.string( record ) if label: diff --git a/Configuration/AlCa/python/autoCondModifiers.py b/Configuration/AlCa/python/autoCondModifiers.py index 2c6254be7e587..bb90f15a9876c 100644 --- a/Configuration/AlCa/python/autoCondModifiers.py +++ b/Configuration/AlCa/python/autoCondModifiers.py @@ -1,7 +1,6 @@ ## ## Append for 0T conditions ## -import six from Configuration.StandardSequences.CondDBESSource_cff import GlobalTag as essource connectionString = essource.connect.value() @@ -10,7 +9,7 @@ def autoCond0T(autoCond): ConditionsFor0T = ','.join( ['RunInfo_0T_v1_mc', "RunInfoRcd", connectionString, "", "2020-07-01 12:00:00.000"] ) GlobalTags0T = {} - for key,val in six.iteritems(autoCond): + for key,val in autoCond.items(): if "phase" in key: # restrict to phase1 upgrade GTs GlobalTags0T[key+"_0T"] = (autoCond[key], ConditionsFor0T) @@ -25,7 +24,7 @@ def autoCondHLTHI(autoCond): FullPedestalsForHLTHI = ','.join( ['SiStripFullPedestals_GR10_v1_hlt', "SiStripPedestalsRcd", connectionString, "", "2021-03-11 12:00:00.000"] ) MenuForHLTHI = ','.join( ['L1Menu_CollisionsHeavyIons2015_v5_uGT_xml', "L1TUtmTriggerMenuRcd", connectionString, "", "2021-03-11 12:00:00.000"] ) - for key,val in six.iteritems(autoCond): + for key,val in autoCond.items(): if key == 'run2_hlt_relval': # modification of HLT relval GT GlobalTagsHLTHI['run2_hlt_hi'] = (autoCond[key], FullPedestalsForHLTHI, MenuForHLTHI) diff --git a/Configuration/AlCa/python/autoCondPhase2.py 
b/Configuration/AlCa/python/autoCondPhase2.py index cae53c3a1acc0..c7962d37f9835 100644 --- a/Configuration/AlCa/python/autoCondPhase2.py +++ b/Configuration/AlCa/python/autoCondPhase2.py @@ -1,4 +1,3 @@ -import six from Configuration.StandardSequences.CondDBESSource_cff import GlobalTag as essource connectionString = essource.connect.value() @@ -131,7 +130,7 @@ # method called in autoCond def autoCondPhase2(autoCond): - for key,val in six.iteritems(phase2GTs): + for key,val in phase2GTs.items(): if len(val)==1 : autoCond[key] = ( autoCond[val[0]] ) else: diff --git a/Configuration/Applications/python/ConfigBuilder.py b/Configuration/Applications/python/ConfigBuilder.py index c2eeec132e969..0d8d7617471cc 100644 --- a/Configuration/Applications/python/ConfigBuilder.py +++ b/Configuration/Applications/python/ConfigBuilder.py @@ -6,7 +6,6 @@ import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.Modules import _Module -import six # The following import is provided for backward compatibility reasons. # The function used to be defined in this file. from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag @@ -1540,7 +1539,7 @@ def prepare_HLT(self, sequence = None): optionsForHLT['type'] = 'HIon' else: optionsForHLT['type'] = 'GRun' - optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in six.iteritems(optionsForHLT)) + optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items()) if sequence == 'run,fromSource': if hasattr(self.process.source,'firstRun'): self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig)) diff --git a/Configuration/Geometry/test/BuildFile.xml b/Configuration/Geometry/test/BuildFile.xml index 23d2cc2bad224..c96ff1eb2e1e7 100644 --- a/Configuration/Geometry/test/BuildFile.xml +++ b/Configuration/Geometry/test/BuildFile.xml @@ -1,4 +1,4 @@ - - + + diff --git a/Configuration/HLT/python/autoCondHLT.py b/Configuration/HLT/python/autoCondHLT.py index f79c5f2d6f3a4..242fb3525da1b 100644 --- a/Configuration/HLT/python/autoCondHLT.py +++ b/Configuration/HLT/python/autoCondHLT.py @@ -62,8 +62,7 @@ } def autoCondHLT(autoCond): - import six - for key,val in six.iteritems(hltGTs): + for key,val in hltGTs.items(): if len(val)==1 : autoCond[key] = ( autoCond[val[0]] ) else: diff --git a/Configuration/PyReleaseValidation/python/relval_machine.py b/Configuration/PyReleaseValidation/python/relval_machine.py index 884f9998564ae..66bd00577629d 100644 --- a/Configuration/PyReleaseValidation/python/relval_machine.py +++ b/Configuration/PyReleaseValidation/python/relval_machine.py @@ -4,7 +4,6 @@ import json import collections -import six workflows = Matrix() steps = Steps() @@ -41,7 +40,7 @@ def convert_keys_to_string(dictionary): if isinstance(dictionary, str): return str(dictionary) elif isinstance(dictionary, collections.Mapping): - return dict(map(convert_keys_to_string, six.iteritems(dictionary))) + return dict(map(convert_keys_to_string, dictionary.items())) elif isinstance(dictionary, collections.Iterable): return type(dictionary)(map(convert_keys_to_string, dictionary)) else: diff --git a/Configuration/PyReleaseValidation/python/relval_steps.py b/Configuration/PyReleaseValidation/python/relval_steps.py index 17a98f05d6cf3..8783dc88aab5f 100644 --- a/Configuration/PyReleaseValidation/python/relval_steps.py +++ b/Configuration/PyReleaseValidation/python/relval_steps.py @@ -1,6 +1,5 @@ from __future__ import 
absolute_import from .MatrixUtil import * -import six from Configuration.HLT.autoHLT import autoHLT from Configuration.AlCa.autoPCL import autoPCL @@ -3313,7 +3312,7 @@ def gen2021HiMix(fragment,howMuch): upgradeStepDict={} -for specialType,specialWF in six.iteritems(upgradeWFs): +for specialType,specialWF in upgradeWFs.items(): specialWF.init(upgradeStepDict) # just make all combinations - yes, some will be nonsense.. but then these are not used unless specified above @@ -3465,12 +3464,12 @@ def gen2021HiMix(fragment,howMuch): } # setup baseline and variations - for specialType,specialWF in six.iteritems(upgradeWFs): + for specialType,specialWF in upgradeWFs.items(): specialWF.setup(upgradeStepDict, k, upgradeProperties[year][k]) # setup PU if k2 in PUDataSets: - for specialType,specialWF in six.iteritems(upgradeWFs): + for specialType,specialWF in upgradeWFs.items(): for step in specialWF.PU: stepName = specialWF.getStepName(step) stepNamePU = specialWF.getStepNamePU(step) @@ -3485,7 +3484,7 @@ def gen2021HiMix(fragment,howMuch): for step in upgradeStepDict.keys(): # we need to do this for each fragment if 'Sim' in step or 'Premix' in step: - for frag,info in six.iteritems(upgradeFragments): + for frag,info in upgradeFragments.items(): howMuch=info.howMuch for key in [key for year in upgradeKeys for key in upgradeKeys[year]]: k=frag[:-4]+'_'+key+'_'+step diff --git a/Configuration/PyReleaseValidation/python/relval_upgrade.py b/Configuration/PyReleaseValidation/python/relval_upgrade.py index e1fd636c5f613..d25ad0787661b 100644 --- a/Configuration/PyReleaseValidation/python/relval_upgrade.py +++ b/Configuration/PyReleaseValidation/python/relval_upgrade.py @@ -1,4 +1,3 @@ -import six # import the definition of the steps and input files: from Configuration.PyReleaseValidation.relval_steps import * @@ -20,7 +19,7 @@ def makeStepName(key,frag,step,suffix): for year in upgradeKeys: for i,key in enumerate(upgradeKeys[year]): numWF=numWFAll[year][i] - for frag,info in six.iteritems(upgradeFragments): + for frag,info in upgradeFragments.items(): # phase2-specific fragments are skipped in phase1 if ("CE_E" in frag or "CE_H" in frag) and year==2017: numWF += 1 @@ -38,7 +37,7 @@ def makeStepName(key,frag,step,suffix): if 'HARVEST' in step: hasHarvest = True - for specialType,specialWF in six.iteritems(upgradeWFs): + for specialType,specialWF in upgradeWFs.items(): if (specialType != 'baseline') and ( ('PU' in step and step.replace('PU','') in specialWF.PU) or (step in specialWF.steps) ): stepList[specialType].append(stepMaker(key,frag[:-4],step,specialWF.suffix)) # hack to add an extra step @@ -58,7 +57,7 @@ def makeStepName(key,frag,step,suffix): else: stepList[specialType].append(stepMaker(key,frag[:-4],step,'')) - for specialType,specialWF in six.iteritems(upgradeWFs): + for specialType,specialWF in upgradeWFs.items(): # remove other steps for premixS1 if specialType=="PMXS1": stepList[specialType] = stepList[specialType][:1] diff --git a/Configuration/PyReleaseValidation/python/upgradeWorkflowComponents.py b/Configuration/PyReleaseValidation/python/upgradeWorkflowComponents.py index b2ffc7a0c4577..d883afcf94f94 100644 --- a/Configuration/PyReleaseValidation/python/upgradeWorkflowComponents.py +++ b/Configuration/PyReleaseValidation/python/upgradeWorkflowComponents.py @@ -1,6 +1,5 @@ from copy import copy, deepcopy from collections import OrderedDict -import six from .MatrixUtil import merge, Kby import re @@ -1073,7 +1072,7 @@ def condition(self, fragment, stepList, key, hasHarvest): ) # check 
for duplicate offsets -offsets = [specialWF.offset for specialType,specialWF in six.iteritems(upgradeWFs)] +offsets = [specialWF.offset for specialType,specialWF in upgradeWFs.items()] seen = set() dups = set(x for x in offsets if x in seen or seen.add(x)) if len(dups)>0: diff --git a/Configuration/StandardSequences/python/earlyDeleteSettings_cff.py b/Configuration/StandardSequences/python/earlyDeleteSettings_cff.py index e2a8c9a228f43..ffa5e87fd7dff 100644 --- a/Configuration/StandardSequences/python/earlyDeleteSettings_cff.py +++ b/Configuration/StandardSequences/python/earlyDeleteSettings_cff.py @@ -6,7 +6,6 @@ from RecoTracker.Configuration.customiseEarlyDeleteForSeeding import customiseEarlyDeleteForSeeding from CommonTools.ParticleFlow.Isolation.customiseEarlyDeleteForCandIsoDeposits import customiseEarlyDeleteForCandIsoDeposits -import six def _hasInputTagModuleLabel(process, pset, psetModLabel, moduleLabels, result): for name in pset.parameterNames_(): @@ -52,14 +51,14 @@ def customiseEarlyDelete(process): process.options.canDeleteEarly = cms.untracked.vstring() branchSet = set() - for branches in six.itervalues(products): + for branches in products.values(): for branch in branches: branchSet.add(branch) branchList = sorted(branchSet) process.options.canDeleteEarly.extend(branchList) # LogErrorHarvester should not wait for deleted items - for prod in six.itervalues(process.producers_()): + for prod in process.producers_().values(): if prod.type_() == "LogErrorHarvester": if not hasattr(prod,'excludeModules'): prod.excludeModules = cms.untracked.vstring() @@ -70,12 +69,12 @@ def customiseEarlyDelete(process): # Find the consumers producers=[] branchesList=[] - for producer, branches in six.iteritems(products): + for producer, branches in products.items(): producers.append(producer) branchesList.append(branches) for moduleType in [process.producers_(), process.filters_(), process.analyzers_()]: - for name, module in six.iteritems(moduleType): + for name, module in moduleType.items(): result=[] for producer in producers: result.append(False) diff --git a/DQM/Integration/scripts/fileregistration/mergeAndRegister.py b/DQM/Integration/scripts/fileregistration/mergeAndRegister.py index d6f8f4ef4a7c2..e57040f9c4e2f 100755 --- a/DQM/Integration/scripts/fileregistration/mergeAndRegister.py +++ b/DQM/Integration/scripts/fileregistration/mergeAndRegister.py @@ -7,7 +7,6 @@ import smtplib from email.MIMEText import MIMEText from ROOT import TFile -import six def sendmail(EmailAddress,run): s=smtplib.SMTP("localhost") @@ -68,7 +67,7 @@ def filecheck(rootfile): #### loop for runs newFiles = [] allOldFiles = [] -for (run, files) in six.iteritems(NEW): +for (run, files) in NEW.items(): runnr = "%09d" % long(run) destdir = "%s/%s/%s/%s" % (FILEDIR, runnr[0:3], runnr[3:6], runnr[6:9]) oldfiles = [] diff --git a/DQM/Integration/scripts/harvesting_tools/cmsHarvester.py b/DQM/Integration/scripts/harvesting_tools/cmsHarvester.py index 710766fcf1ab5..afb6e2d723944 100755 --- a/DQM/Integration/scripts/harvesting_tools/cmsHarvester.py +++ b/DQM/Integration/scripts/harvesting_tools/cmsHarvester.py @@ -99,7 +99,6 @@ from inspect import getargspec from random import choice -import six # These we need to communicate with DBS global DBSAPI from DBSAPI.dbsApi import DbsApi @@ -1445,7 +1444,7 @@ def create_and_check_castor_dirs(self): # Now call the checker for all (unique) subdirs. 
castor_dirs = [] - for (dataset_name, runs) in six.iteritems(self.datasets_to_use): + for (dataset_name, runs) in self.datasets_to_use.items(): for run in runs: castor_dirs.append(self.datasets_information[dataset_name] \ @@ -2310,7 +2309,7 @@ def check_input_status(self): ### # Dump some info about the Frontier connections used. - for (key, value) in six.iteritems(self.frontier_connection_name): + for (key, value) in self.frontier_connection_name.items(): frontier_type_str = "unknown" if key == "globaltag": frontier_type_str = "the GlobalTag" @@ -2924,7 +2923,7 @@ def dbs_resolve_number_of_events(self, dataset_name, run_number=None): ## # Now translate this into a slightly more usable mapping. ## sites = {} -## for (run_number, site_info) in six.iteritems(sample_info): +## for (run_number, site_info) in sample_info.items(): ## # Quick-n-dirty trick to see if all file counts are the ## # same. ## unique_file_counts = set([i[1] for i in site_info]) @@ -3043,7 +3042,7 @@ def dbs_resolve_number_of_events(self, dataset_name, run_number=None): ## # Now translate this into a slightly more usable mapping. ## sites = {} -## for (run_number, site_info) in six.iteritems(sample_info): +## for (run_number, site_info) in sample_info.items(): ## # Quick-n-dirty trick to see if all file counts are the ## # same. ## unique_file_counts = set([i[1] for i in site_info]) @@ -4022,7 +4021,7 @@ def check_dataset_list(self): # If we emptied out a complete dataset, remove the whole # thing. dataset_names_after_checks_tmp = copy.deepcopy(dataset_names_after_checks) - for (dataset_name, runs) in six.iteritems(dataset_names_after_checks): + for (dataset_name, runs) in dataset_names_after_checks.items(): if len(runs) < 1: self.logger.warning(" Removing dataset without any runs " \ "(left) `%s'" % \ @@ -4868,7 +4867,7 @@ def create_harvesting_config(self, dataset_name): # exists. 
customisations.append("print \"Not using reference histograms\"") customisations.append("if hasattr(process, \"dqmRefHistoRootFileGetter\"):") - customisations.append(" for (sequence_name, sequence) in six.iteritems(process.sequences):") + customisations.append(" for (sequence_name, sequence) in process.sequences.items():") customisations.append(" if sequence.remove(process.dqmRefHistoRootFileGetter):") customisations.append(" print \"Removed process.dqmRefHistoRootFileGetter from sequence `%s'\" % \\") customisations.append(" sequence_name") @@ -5270,7 +5269,7 @@ def load_ref_hist_mappings(self): self.logger.info(" Successfully loaded %d mapping(s)" % \ len(self.ref_hist_mappings)) max_len = max([len(i) for i in self.ref_hist_mappings.keys()]) - for (map_from, map_to) in six.iteritems(self.ref_hist_mappings): + for (map_from, map_to) in self.ref_hist_mappings.items(): self.logger.info(" %-*s -> %s" % \ (max_len, map_from, map_to)) diff --git a/DQM/SiStripMonitorClient/scripts/TH2PolyOfflineMaps.py b/DQM/SiStripMonitorClient/scripts/TH2PolyOfflineMaps.py index ea45286173d46..417e1d8d2b2d9 100755 --- a/DQM/SiStripMonitorClient/scripts/TH2PolyOfflineMaps.py +++ b/DQM/SiStripMonitorClient/scripts/TH2PolyOfflineMaps.py @@ -6,7 +6,6 @@ from ROOT import * from copy import deepcopy from array import array -import six gROOT.SetBatch() # don't pop up canvases @@ -313,7 +312,7 @@ def __init__(self, inputDQMName, outputDirName, minMaxFileName, limits, modDicN # init internal data structure self.internalData.update({int(items[0]) : {}}) - self.rawToOnlineDict = dict((v,k) for k,v in six.iteritems(self.detDict)) + self.rawToOnlineDict = dict((v,k) for k,v in self.detDict.items()) self.__GroupHistograms() diff --git a/DQM/TrackerRemapper/test/PrintTkMap.py b/DQM/TrackerRemapper/test/PrintTkMap.py index c60474f267947..c380ef890f7d7 100644 --- a/DQM/TrackerRemapper/test/PrintTkMap.py +++ b/DQM/TrackerRemapper/test/PrintTkMap.py @@ -6,7 +6,6 @@ from ROOT import * from copy import deepcopy from array import array -import six gROOT.SetBatch() # don't pop up canvases diff --git a/DQM/TrackingMonitorSource/python/TrackingSourceConfig_Tier0_cff.py b/DQM/TrackingMonitorSource/python/TrackingSourceConfig_Tier0_cff.py index 301e64ec598f2..7d0649a3b6316 100644 --- a/DQM/TrackingMonitorSource/python/TrackingSourceConfig_Tier0_cff.py +++ b/DQM/TrackingMonitorSource/python/TrackingSourceConfig_Tier0_cff.py @@ -1,7 +1,6 @@ import FWCore.ParameterSet.Config as cms import RecoTracker.IterativeTracking.iterativeTkConfig as _cfg import RecoTracker.IterativeTracking.iterativeTkUtils as _utils -import six ### load which are the tracks collection 2 be monitored from DQM.TrackingMonitorSource.TrackCollections2monitor_cff import * @@ -260,7 +259,7 @@ def _copyIfExists(mod, pset, name): if hasattr(pset, name): setattr(mod, name, getattr(pset, name)) -for _step, _pset in six.iteritems(seedMonitoring): +for _step, _pset in seedMonitoring.items(): _mod = DQM.TrackingMonitor.TrackingMonitorSeed_cfi.TrackMonSeed.clone( doTrackCandHistos = cms.bool(True) ) diff --git a/DQMOffline/Configuration/scripts/cmsswConfigtrace.py b/DQMOffline/Configuration/scripts/cmsswConfigtrace.py index b5ad57f4c9929..a7e416d534eca 100755 --- a/DQMOffline/Configuration/scripts/cmsswConfigtrace.py +++ b/DQMOffline/Configuration/scripts/cmsswConfigtrace.py @@ -2,7 +2,6 @@ from __future__ import print_function import os import re -import six import sys import inspect import sqlite3 @@ -116,7 +115,7 @@ def new_items_(self): #items += self.moduleItems_() items 
+= self.outputModules.items() #items += self.sequences.items() # TODO: we don't need sequences that are not paths? - items += six.iteritems(self.paths) + items += self.paths.items() items += self.endpaths.items() items += self.services.items() items += self.es_producers.items() diff --git a/DQMOffline/Configuration/scripts/cmsswFiletrace.py b/DQMOffline/Configuration/scripts/cmsswFiletrace.py index d6f30894f0f1a..e505aefdf32f0 100755 --- a/DQMOffline/Configuration/scripts/cmsswFiletrace.py +++ b/DQMOffline/Configuration/scripts/cmsswFiletrace.py @@ -10,7 +10,6 @@ from collections import defaultdict # only needed to locate CMSSW -import six import FWCore import FWCore.ParameterSet.Types @@ -21,7 +20,6 @@ WRAP_SCRIPTS = ["cmsDriver.py" ] IGNORE_DIRS = [ os.path.dirname(os.__file__), - os.path.dirname(six.__file__), FWCore.ParameterSet.Types.__file__, ] STRIPPATHS = [ # we will add the base dir from CMSSWCALLBASE env var here diff --git a/DQMOffline/L1Trigger/python/L1TCommon.py b/DQMOffline/L1Trigger/python/L1TCommon.py index 51d2f8f0d4880..f7b00a552cda9 100644 --- a/DQMOffline/L1Trigger/python/L1TCommon.py +++ b/DQMOffline/L1Trigger/python/L1TCommon.py @@ -1,10 +1,9 @@ -import six def generateEfficiencyStrings(variables, plots): stringTemplate = "{plot} " + \ "'{var} efficiency; Offline E_{{T}}^{{miss}} (GeV); {var} efficiency'" + \ " {num_path} {den_path}" - for variable, thresholds in six.iteritems(variables): + for variable, thresholds in variables.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) diff --git a/DQMOffline/L1Trigger/python/L1TEGammaDiff_cfi.py b/DQMOffline/L1Trigger/python/L1TEGammaDiff_cfi.py index ba7426b88f5a9..660c105d13c75 100644 --- a/DQMOffline/L1Trigger/python/L1TEGammaDiff_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TEGammaDiff_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEGammaOffline_cfi -import six variables = { 'electron': L1TEGammaOffline_cfi.electronEfficiencyThresholds, @@ -15,7 +14,7 @@ allEfficiencyPlots = [] add_plot = allEfficiencyPlots.append -for variable, thresholds in six.iteritems(variables): +for variable, thresholds in variables.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) @@ -70,7 +69,7 @@ allEfficiencyPlots_HI = [] add_plot = allEfficiencyPlots_HI.append -for variable, thresholds in six.iteritems(variables_HI): +for variable, thresholds in variables_HI.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) diff --git a/DQMOffline/L1Trigger/python/L1TEGammaEfficiency_cfi.py b/DQMOffline/L1Trigger/python/L1TEGammaEfficiency_cfi.py index 3a39c0173284c..de7328743845a 100644 --- a/DQMOffline/L1Trigger/python/L1TEGammaEfficiency_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TEGammaEfficiency_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEGammaOffline_cfi -import six variables = { 'electron': L1TEGammaOffline_cfi.electronEfficiencyThresholds, diff --git a/DQMOffline/L1Trigger/python/L1TEtSumDiff_cfi.py b/DQMOffline/L1Trigger/python/L1TEtSumDiff_cfi.py index 2ec1be10d7706..2191f7920d8e5 100644 --- a/DQMOffline/L1Trigger/python/L1TEtSumDiff_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TEtSumDiff_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEtSumJetOffline_cfi as L1TStep1 -import six 
variables = { 'met': L1TStep1.metEfficiencyThresholds, @@ -18,7 +17,7 @@ allEfficiencyPlots = [] add_plot = allEfficiencyPlots.append -for variable, thresholds in six.iteritems(variables): +for variable, thresholds in variables.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) @@ -61,7 +60,7 @@ allEfficiencyPlots_HI = [] add_plot = allEfficiencyPlots_HI.append -for variable, thresholds in six.iteritems(variables_HI): +for variable, thresholds in variables_HI.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) diff --git a/DQMOffline/L1Trigger/python/L1TEtSumEfficiency_cfi.py b/DQMOffline/L1Trigger/python/L1TEtSumEfficiency_cfi.py index c81e6185b037a..cdb8352b67d39 100644 --- a/DQMOffline/L1Trigger/python/L1TEtSumEfficiency_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TEtSumEfficiency_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEtSumJetOffline_cfi as L1TStep1 -import six variables = { 'met': L1TStep1.metEfficiencyThresholds, diff --git a/DQMOffline/L1Trigger/python/L1TJetDiff_cfi.py b/DQMOffline/L1Trigger/python/L1TJetDiff_cfi.py index 63a9c49aeee1b..f63948dd7aadc 100644 --- a/DQMOffline/L1Trigger/python/L1TJetDiff_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TJetDiff_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEtSumJetOffline_cfi as L1TStep1 -import six variables = { 'jet': L1TStep1.jetEfficiencyThresholds, @@ -14,7 +13,7 @@ allEfficiencyPlots = [] add_plot = allEfficiencyPlots.append -for variable, thresholds in six.iteritems(variables): +for variable, thresholds in variables.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) @@ -56,7 +55,7 @@ allEfficiencyPlots_HI = [] add_plot = allEfficiencyPlots_HI.append -for variable, thresholds in six.iteritems(variables_HI): +for variable, thresholds in variables_HI.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) diff --git a/DQMOffline/L1Trigger/python/L1TJetEfficiency_cfi.py b/DQMOffline/L1Trigger/python/L1TJetEfficiency_cfi.py index c2bf9d5e66674..6f41a5288e2e0 100644 --- a/DQMOffline/L1Trigger/python/L1TJetEfficiency_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TJetEfficiency_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TEtSumJetOffline_cfi as L1TStep1 -import six variables = { 'jet': L1TStep1.jetEfficiencyThresholds, diff --git a/DQMOffline/L1Trigger/python/L1TTauDiff_cfi.py b/DQMOffline/L1Trigger/python/L1TTauDiff_cfi.py index 18eaac464f533..e4d0ef115be1b 100644 --- a/DQMOffline/L1Trigger/python/L1TTauDiff_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TTauDiff_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TTauOffline_cfi -import six variables = { 'tau': L1TTauOffline_cfi.tauEfficiencyThresholds, @@ -19,7 +18,7 @@ allEfficiencyPlots = [] add_plot = allEfficiencyPlots.append -for variable, thresholds in six.iteritems(variables): +for variable, thresholds in variables.items(): for plot in plots[variable]: for threshold in thresholds: plotName = '{0}_threshold_{1}'.format(plot, threshold) diff --git a/DQMOffline/L1Trigger/python/L1TTauEfficiency_cfi.py b/DQMOffline/L1Trigger/python/L1TTauEfficiency_cfi.py index 55968d0ff0c9d..f4772459c6024 100644 --- 
a/DQMOffline/L1Trigger/python/L1TTauEfficiency_cfi.py +++ b/DQMOffline/L1Trigger/python/L1TTauEfficiency_cfi.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from DQMOffline.L1Trigger import L1TTauOffline_cfi -import six variables = { 'tau': L1TTauOffline_cfi.tauEfficiencyThresholds, diff --git a/FWCore/Concurrency/scripts/edmStreamStallGrapher.py b/FWCore/Concurrency/scripts/edmStreamStallGrapher.py index 497f0d8bf09be..a90987e45fd0a 100755 --- a/FWCore/Concurrency/scripts/edmStreamStallGrapher.py +++ b/FWCore/Concurrency/scripts/edmStreamStallGrapher.py @@ -5,7 +5,6 @@ from operator import attrgetter,itemgetter import sys from collections import defaultdict -import six #---------------------------------------------- def printHelp(): s = ''' @@ -163,7 +162,7 @@ def __init__(self,f): self.numStreams =numStreams self._moduleNames = moduleNames self.maxNameSize =0 - for n in six.iteritems(moduleNames): + for n in moduleNames.items(): self.maxNameSize = max(self.maxNameSize,len(n)) self.maxNameSize = max(self.maxNameSize,len(kSourceDelayedRead)) @@ -416,7 +415,7 @@ def createAsciiImage(processingSteps, numStreams, maxNameSize): def printStalledModulesInOrder(stalledModules): priorities = [] maxNameSize = 0 - for name,t in six.iteritems(stalledModules): + for name,t in stalledModules.items(): maxNameSize = max(maxNameSize, len(name)) t.sort(reverse=True) priorities.append((name,sum(t),t)) diff --git a/FWCore/Modules/python/customiseCheckEventSetup.py b/FWCore/Modules/python/customiseCheckEventSetup.py index 928ef92477b26..691f2afa1769b 100644 --- a/FWCore/Modules/python/customiseCheckEventSetup.py +++ b/FWCore/Modules/python/customiseCheckEventSetup.py @@ -1,6 +1,5 @@ from __future__ import print_function import FWCore.ParameterSet.Config as cms -import six def customise(process): process.escontent = cms.EDAnalyzer("PrintEventSetupContent", @@ -15,9 +14,9 @@ def customise(process): if process.schedule_() is not None: process.schedule_().append(process.esout) - for name, module in six.iteritems(process.es_sources_()): + for name, module in process.es_sources_().items(): print("ESModules> provider:%s '%s'" % (name, module.type_())) - for name, module in six.iteritems(process.es_producers_()): + for name, module in process.es_producers_().items(): print("ESModules> provider:%s '%s'" % (name, module.type_())) return process diff --git a/FWCore/Modules/python/logErrorHarvester_cff.py b/FWCore/Modules/python/logErrorHarvester_cff.py index b6fc9bf1e9074..a17e5cd7ed0b5 100644 --- a/FWCore/Modules/python/logErrorHarvester_cff.py +++ b/FWCore/Modules/python/logErrorHarvester_cff.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from FWCore.Modules.logErrorHarvester_cfi import logErrorHarvester -import six def customiseLogErrorHarvesterUsingOutputCommands(process): logName = 'logErrorHarvester' @@ -9,7 +8,7 @@ def customiseLogErrorHarvesterUsingOutputCommands(process): modulesFromAllOutput = set() onlyOneOutput = (len(process.outputModules_()) == 1) - for o in six.itervalues(process.outputModules_()): + for o in process.outputModules_().values(): if not hasattr(o,"outputCommands"): continue modulesFromOutput = set() diff --git a/FWCore/ParameterSet/python/Config.py b/FWCore/ParameterSet/python/Config.py index c8ef6e9015a41..e4fdcb0367e82 100644 --- a/FWCore/ParameterSet/python/Config.py +++ b/FWCore/ParameterSet/python/Config.py @@ -3,7 +3,6 @@ ### command line options helper from __future__ import print_function from __future__ import absolute_import -import six import os from 
.Options import Options options = Options() @@ -242,7 +241,7 @@ def defaultOptions_(): def __updateOptions(self,opt): newOpts = self.defaultOptions_() if isinstance(opt,dict): - for k,v in six.iteritems(opt): + for k,v in opt.items(): setattr(newOpts,k,v) else: for p in opt.parameters_(): @@ -255,7 +254,7 @@ def defaultMaxEvents_(): def __updateMaxEvents(self,ps): newMax = self.defaultMaxEvents_() if isinstance(ps,dict): - for k,v in six.iteritems(ps): + for k,v in ps.items(): setattr(newMax,k,v) else: for p in ps.parameters_(): @@ -512,7 +511,7 @@ def __findFirstUsingModule(self, seqsOrTasks, mod): containing mod and return it. If none is found, return None""" from FWCore.ParameterSet.SequenceTypes import ModuleNodeVisitor l = list() - for seqOrTask in six.itervalues(seqsOrTasks): + for seqOrTask in seqsOrTasks.values(): l[:] = [] v = ModuleNodeVisitor(l) seqOrTask.visit(v) @@ -719,7 +718,7 @@ def extend(self,other,items=()): self.extend(item) #now create a sequence that uses the newly made items - for name,seq in six.iteritems(seqs): + for name,seq in seqs.items(): if id(seq) not in self._cloneToObjectDict: self.__setattr__(name,seq) else: @@ -729,7 +728,7 @@ def extend(self,other,items=()): #now put in proper bucket newSeq._place(name,self) - for name, task in six.iteritems(tasksToAttach): + for name, task in tasksToAttach.items(): self.__setattr__(name, task) #apply modifiers now that all names have been added @@ -770,54 +769,54 @@ def dumpConfig(self, options=PrintOptions()): config+=self._dumpConfigNamedList(self.subProcesses_(), 'subProcess', options) - config+=self._dumpConfigNamedList(six.iteritems(self.producers_()), + config+=self._dumpConfigNamedList(self.producers_().items(), 'module', options) - config+=self._dumpConfigNamedList(six.iteritems(self.switchProducers_()), + config+=self._dumpConfigNamedList(self.switchProducers_().items(), 'module', options) - config+=self._dumpConfigNamedList(six.iteritems(self.filters_()), + config+=self._dumpConfigNamedList(self.filters_().items(), 'module', options) - config+=self._dumpConfigNamedList(six.iteritems(self.analyzers_()), + config+=self._dumpConfigNamedList(self.analyzers_().items(), 'module', options) - config+=self._dumpConfigNamedList(six.iteritems(self.outputModules_()), + config+=self._dumpConfigNamedList(self.outputModules_().items(), 'module', options) - config+=self._dumpConfigNamedList(six.iteritems(self.sequences_()), + config+=self._dumpConfigNamedList(self.sequences_().items(), 'sequence', options) - config+=self._dumpConfigNamedList(six.iteritems(self.paths_()), + config+=self._dumpConfigNamedList(self.paths_().items(), 'path', options) - config+=self._dumpConfigNamedList(six.iteritems(self.endpaths_()), + config+=self._dumpConfigNamedList(self.endpaths_().items(), 'endpath', options) - config+=self._dumpConfigUnnamedList(six.iteritems(self.services_()), + config+=self._dumpConfigUnnamedList(self.services_().items(), 'service', options) - config+=self._dumpConfigNamedList(six.iteritems(self.aliases_()), + config+=self._dumpConfigNamedList(self.aliases_().items(), 'alias', options) config+=self._dumpConfigOptionallyNamedList( - six.iteritems(self.es_producers_()), + self.es_producers_().items(), 'es_module', options) config+=self._dumpConfigOptionallyNamedList( - six.iteritems(self.es_sources_()), + self.es_sources_().items(), 'es_source', options) config += self._dumpConfigESPrefers(options) - for name,item in six.iteritems(self.psets): + for name,item in self.psets.items(): config 
+=options.indentation()+item.configTypeName()+' '+name+' = '+item.configValue(options) - for name,item in six.iteritems(self.vpsets): + for name,item in self.vpsets.items(): config +=options.indentation()+'VPSet '+name+' = '+item.configValue(options) if self.schedule: pathNames = [p.label_() for p in self.schedule] config +=options.indentation()+'schedule = {'+','.join(pathNames)+'}\n' -# config+=self._dumpConfigNamedList(six.iteritems(self.vpsets), +# config+=self._dumpConfigNamedList(self.vpsets.items(), # 'VPSet', # options) config += "}\n" @@ -826,7 +825,7 @@ def dumpConfig(self, options=PrintOptions()): def _dumpConfigESPrefers(self, options): result = '' - for item in six.itervalues(self.es_prefers_()): + for item in self.es_prefers_().values(): result +=options.indentation()+'es_prefer '+item.targetLabel_()+' = '+item.dumpConfig(options) return result @@ -895,7 +894,7 @@ def _itemsInDependencyOrder(self, processDictionaryOfItems): # For each item, see what other items it depends upon # For our purpose here, an item depends on the items it contains. dependencies = {} - for label,item in six.iteritems(processDictionaryOfItems): + for label,item in processDictionaryOfItems.items(): containedItems = [] if isinstance(item, Task): v = TaskVisitor(containedItems) @@ -932,25 +931,25 @@ def _itemsInDependencyOrder(self, processDictionaryOfItems): # keep looping until we get rid of all dependencies while dependencies: oldDeps = dict(dependencies) - for label,deps in six.iteritems(oldDeps): + for label,deps in oldDeps.items(): if len(deps)==0: returnValue[label]=processDictionaryOfItems[label] #remove this as a dependency for all other tasks del dependencies[label] - for lb2,deps2 in six.iteritems(dependencies): + for lb2,deps2 in dependencies.items(): while deps2.count(label): deps2.remove(label) return returnValue def _dumpPython(self, d, options): result = '' - for name, value in sorted(six.iteritems(d)): + for name, value in sorted(d.items()): result += value.dumpPythonAs(name,options)+'\n' return result def _splitPython(self, subfolder, d, options): result = {} - for name, value in sorted(six.iteritems(d)): + for name, value in sorted(d.items()): result[name] = subfolder, value.dumpPythonAs(name, options) + '\n' return result @@ -1065,17 +1064,17 @@ def _replaceInSequences(self, label, new): # process known sequences to do a non-recursive change. 
Then do # a recursive change to get cases where a sub-sequence unknown to # the process has the item to be replaced - for sequenceable in six.itervalues(self.sequences): + for sequenceable in self.sequences.values(): sequenceable._replaceIfHeldDirectly(old,new) - for sequenceable in six.itervalues(self.sequences): + for sequenceable in self.sequences.values(): sequenceable.replace(old,new) - for sequenceable in six.itervalues(self.paths): + for sequenceable in self.paths.values(): sequenceable.replace(old,new) - for sequenceable in six.itervalues(self.endpaths): + for sequenceable in self.endpaths.values(): sequenceable.replace(old,new) def _replaceInTasks(self, label, new): old = getattr(self,label) - for task in six.itervalues(self.tasks): + for task in self.tasks.values(): task.replace(old, new) def _replaceInSchedule(self, label, new): if self.schedule_() == None: @@ -1089,7 +1088,7 @@ def globalReplace(self,label,new): raise LookupError("process has no item of label "+label) setattr(self,label,new) def _insertInto(self, parameterSet, itemDict): - for name,value in six.iteritems(itemDict): + for name,value in itemDict.items(): value.insertInto(parameterSet, name) def _insertOneInto(self, parameterSet, label, item, tracked): vitems = [] @@ -1100,7 +1099,7 @@ def _insertOneInto(self, parameterSet, label, item, tracked): parameterSet.addVString(tracked, label, vitems) def _insertManyInto(self, parameterSet, label, itemDict, tracked): l = [] - for name,value in six.iteritems(itemDict): + for name,value in itemDict.items(): value.appendToProcessDescList_(l, name) value.insertInto(parameterSet, name) # alphabetical order is easier to compare with old language @@ -1109,7 +1108,7 @@ def _insertManyInto(self, parameterSet, label, itemDict, tracked): def _insertSwitchProducersInto(self, parameterSet, labelModules, labelAliases, itemDict, tracked): modules = parameterSet.getVString(tracked, labelModules) aliases = parameterSet.getVString(tracked, labelAliases) - for name,value in six.iteritems(itemDict): + for name,value in itemDict.items(): value.appendToProcessDescLists_(modules, aliases, name) value.insertInto(parameterSet, name) modules.sort() @@ -1184,9 +1183,9 @@ def _insertPaths(self, processPSet, nodeVisitor): processPSet.addVString(False, "@filters_on_endpaths", endpathValidator.filtersOnEndpaths) def resolve(self,keepUnresolvedSequencePlaceholders=False): - for x in six.itervalues(self.paths): + for x in self.paths.values(): x.resolve(self.__dict__,keepUnresolvedSequencePlaceholders) - for x in six.itervalues(self.endpaths): + for x in self.endpaths.values(): x.resolve(self.__dict__,keepUnresolvedSequencePlaceholders) if not self.schedule_() == None: for task in self.schedule_()._tasks: @@ -1224,8 +1223,8 @@ def prune(self,verbose=False,keepUnresolvedSequencePlaceholders=False): t.visit(tv) tv.leave(t) else: - pths = list(six.itervalues(self.paths)) - pths.extend(six.itervalues(self.endpaths)) + pths = list(self.paths.values()) + pths.extend(self.endpaths.values()) temp = Schedule(*pths) usedModules=set(temp.moduleNames()) unneededModules = self._pruneModules(self.producers_(), usedModules) @@ -1235,15 +1234,15 @@ def prune(self,verbose=False,keepUnresolvedSequencePlaceholders=False): #remove sequences and tasks that do not appear in remaining paths and endpaths seqs = list() sv = SequenceVisitor(seqs) - for p in six.itervalues(self.paths): + for p in self.paths.values(): p.visit(sv) p.visit(tv) - for p in six.itervalues(self.endpaths): + for p in self.endpaths.values(): p.visit(sv) 
p.visit(tv) def removeUnneeded(seqOrTasks, allSequencesOrTasks): _keepSet = set(( s for s in seqOrTasks if s.hasLabel_())) - _availableSet = set(six.itervalues(allSequencesOrTasks)) + _availableSet = set(allSequencesOrTasks.values()) _unneededSet = _availableSet-_keepSet _unneededLabels = [] for s in _unneededSet: @@ -1316,29 +1315,29 @@ def __extractPSet(self,pset): # the modules, ESSources, ESProducers, and services it visits. nodeVisitor = NodeVisitor() self._insertPaths(adaptor, nodeVisitor) - all_modules_onTasksOrScheduled = { key:value for key, value in six.iteritems(all_modules) if value in nodeVisitor.modules } + all_modules_onTasksOrScheduled = { key:value for key, value in all_modules.items() if value in nodeVisitor.modules } self._insertManyInto(adaptor, "@all_modules", all_modules_onTasksOrScheduled, True) all_switches = self.switchProducers_().copy() - all_switches_onTasksOrScheduled = {key:value for key, value in six.iteritems(all_switches) if value in nodeVisitor.modules } + all_switches_onTasksOrScheduled = {key:value for key, value in all_switches.items() if value in nodeVisitor.modules } self._insertSwitchProducersInto(adaptor, "@all_modules", "@all_aliases", all_switches_onTasksOrScheduled, True) # Same as nodeVisitor except this one visits all the Tasks attached # to the process. processNodeVisitor = NodeVisitor() - for pTask in six.itervalues(self.tasks): + for pTask in self.tasks.values(): pTask.visit(processNodeVisitor) esProducersToEnable = {} - for esProducerName, esProducer in six.iteritems(self.es_producers_()): + for esProducerName, esProducer in self.es_producers_().items(): if esProducer in nodeVisitor.esProducers or not (esProducer in processNodeVisitor.esProducers): esProducersToEnable[esProducerName] = esProducer self._insertManyInto(adaptor, "@all_esmodules", esProducersToEnable, True) esSourcesToEnable = {} - for esSourceName, esSource in six.iteritems(self.es_sources_()): + for esSourceName, esSource in self.es_sources_().items(): if esSource in nodeVisitor.esSources or not (esSource in processNodeVisitor.esSources): esSourcesToEnable[esSourceName] = esSource self._insertManyInto(adaptor, "@all_essources", esSourcesToEnable, True) #handle services differently services = [] - for serviceName, serviceObject in six.iteritems(self.services_()): + for serviceName, serviceObject in self.services_().items(): if serviceObject in nodeVisitor.services or not (serviceObject in processNodeVisitor.services): serviceObject.insertInto(ServiceInjectorAdaptor(adaptor,services)) adaptor.addVPSet(False,"services",services) @@ -1392,7 +1391,7 @@ def _findPreferred(self, esname, d,*args,**kargs): else: # maybe it's an unnamed ESModule? 
found = False - for name, value in six.iteritems(d): + for name, value in d.items(): if value.type_() == esname: if found: raise RuntimeError("More than one ES module for "+esname) @@ -1516,11 +1515,11 @@ def __init__(self,args): self.__args = args def __call__(self,obj): params = {} - for k in six.iterkeys(self.__args): + for k in self.__args.keys(): if hasattr(obj,k): params[k] = getattr(obj,k) _modifyParametersFromDict(params, self.__args, self._raiseUnknownKey) - for k in six.iterkeys(self.__args): + for k in self.__args.keys(): if k in params: setattr(obj,k,params[k]) else: @@ -1909,7 +1908,7 @@ def testProcessInsertion(self): def testProcessExtend(self): class FromArg(object): def __init__(self,*arg,**args): - for name in six.iterkeys(args): + for name in args.keys(): self.__dict__[name]=args[name] a=EDAnalyzer("MyAnalyzer") diff --git a/FWCore/ParameterSet/python/DictTypes.py b/FWCore/ParameterSet/python/DictTypes.py index 755a946e39699..146014fb8c4bd 100644 --- a/FWCore/ParameterSet/python/DictTypes.py +++ b/FWCore/ParameterSet/python/DictTypes.py @@ -1,5 +1,4 @@ # helper classes for sorted and fixed dicts -import six class SortedKeysDict(dict): """a dict preserving order of keys""" # specialised __repr__ missing. @@ -17,10 +16,10 @@ def __init__(self,*args,**kw): else: self.list = list(args[0].keys()) return - self.list = list(six.iterkeys(super(SortedKeysDict,self))) + self.list = list(super(SortedKeysDict,self).keys()) def __repr__(self): - meat = ', '.join([ '%s: %s' % (repr(key), repr(val)) for key,val in six.iteritems(self) ]) + meat = ', '.join([ '%s: %s' % (repr(key), repr(val)) for key,val in self.items() ]) return '{' + meat + '}' def __iter__(self): for key in self.list: diff --git a/FWCore/ParameterSet/python/Mixins.py b/FWCore/ParameterSet/python/Mixins.py index 4cba92a528daf..97104715e5001 100644 --- a/FWCore/ParameterSet/python/Mixins.py +++ b/FWCore/ParameterSet/python/Mixins.py @@ -1,7 +1,6 @@ from __future__ import print_function from builtins import range, object import inspect -import six class _ConfigureComponent(object): """Denotes a class that can be used by the Processes class""" @@ -28,7 +27,7 @@ def __init__(self): self._registry = {} def _reset(self): - for lst in six.itervalues(self._registry): + for lst in self._registry.values(): lst[1] = False def registerSpecialImportForType(self, cls, impStatement): @@ -46,7 +45,7 @@ def registerUse(self, obj): def getSpecialImports(self): coll = set() - for (imp, used) in six.itervalues(self._registry): + for (imp, used) in self._registry.values(): if used: coll.add(imp) return sorted(coll) @@ -254,7 +253,7 @@ def __addParameter(self, name, value): def __setParameters(self,parameters): v = None - for name,value in six.iteritems(parameters): + for name,value in parameters.items(): if name == 'allowAnyLabel_': v = value continue @@ -709,7 +708,7 @@ def saveOrigin(obj, level): def _modifyParametersFromDict(params, newParams, errorRaiser, keyDepth=""): if len(newParams): #need to treat items both in params and myparams specially - for key,value in six.iteritems(newParams): + for key,value in newParams.items(): if key in params: if value is None: del params[key] @@ -721,7 +720,7 @@ def _modifyParametersFromDict(params, newParams, errorRaiser, keyDepth=""): _modifyParametersFromDict(p, value,errorRaiser, ("%s.%s" if isinstance(key, str) else "%s[%s]")%(keyDepth,key)) - for k,v in six.iteritems(p): + for k,v in p.items(): setattr(pset,k,v) oldkeys.discard(k) for k in oldkeys: @@ -736,7 +735,7 @@ def 
_modifyParametersFromDict(params, newParams, errorRaiser, keyDepth=""): _modifyParametersFromDict(p, value,errorRaiser, ("%s.%s" if isinstance(key, str) else "%s[%s]")%(keyDepth,key)) - for k,v in six.iteritems(p): + for k,v in p.items(): plist[k] = v else: raise ValueError("Attempted to change non PSet value "+keyDepth+" using a dictionary") diff --git a/FWCore/ParameterSet/python/Modules.py b/FWCore/ParameterSet/python/Modules.py index a1a03fe18c89d..fa0a9b899e066 100644 --- a/FWCore/ParameterSet/python/Modules.py +++ b/FWCore/ParameterSet/python/Modules.py @@ -6,7 +6,6 @@ from .Types import vstring, EDAlias -import six import copy from .ExceptionHandling import * class Service(_ConfigureComponent,_TypedParameterizable,_Unlabelable): @@ -94,7 +93,7 @@ def __init__(self,type_,targetLabel='',*arg,**kargs): if targetLabel is None: self._targetLabel = str('') if kargs: - for k,v in six.iteritems(kargs): + for k,v in kargs.items(): if not isinstance(v,vstring): raise RuntimeError('ESPrefer only allows vstring attributes. "'+k+'" is a '+str(type(v))) def _placeImpl(self,name,proc): @@ -282,7 +281,7 @@ def __addParameter(self, name, value): if not self.__typeIsValid(value): raise TypeError(name+" does not already exist, so it can only be set to a cms.EDProducer or cms.EDAlias") if name not in self._caseFunctionDict: - raise ValueError("Case '%s' is not allowed (allowed ones are %s)" % (name, ",".join(six.iterkeys(self._caseFunctionDict)))) + raise ValueError("Case '%s' is not allowed (allowed ones are %s)" % (name, ",".join(self._caseFunctionDict.keys()))) if name in self.__dict__: message = "Duplicate insert of member " + name message += "\nThe original parameters are:\n" @@ -293,7 +292,7 @@ def __addParameter(self, name, value): self._isModified = True def __setParameters(self, parameters): - for name, value in six.iteritems(parameters): + for name, value in parameters.items(): self.__addParameter(name, value) def __setattr__(self, name, value): @@ -326,7 +325,7 @@ def clone(self, **params): # Need special treatment as cms.EDProducer is not a valid parameter type (except in this case) myparams = dict() - for name, value in six.iteritems(params): + for name, value in params.items(): if value is None: continue elif isinstance(value, dict): diff --git a/FWCore/ParameterSet/python/SequenceVisitors.py b/FWCore/ParameterSet/python/SequenceVisitors.py index efccf4a45b390..012c67f1a1c28 100644 --- a/FWCore/ParameterSet/python/SequenceVisitors.py +++ b/FWCore/ParameterSet/python/SequenceVisitors.py @@ -2,7 +2,6 @@ from .SequenceTypes import * from .Modules import OutputModule, EDProducer, EDFilter, EDAnalyzer, Service, ESProducer, ESSource, _Module from .Mixins import _Labelable -import six # Use this on Tasks in the Schedule class ScheduleTaskValidator(object): @@ -110,7 +109,7 @@ class ModuleNamesFromGlobalsVisitor(object): by using globals() to lookup the variable names assigned to the modules. 
This allows the determination of the labels before the modules have been attached to a Process.""" def __init__(self,globals_,l): - self._moduleToName = { v[1]:v[0] for v in six.iteritems(globals_) if isinstance(v[1],_Module) } + self._moduleToName = { v[1]:v[0] for v in globals_.items() if isinstance(v[1],_Module) } self._names =l def enter(self,node): if isinstance(node,_Module): diff --git a/FWCore/ParameterSet/python/TreeCrawler.py b/FWCore/ParameterSet/python/TreeCrawler.py index baa4b52b8ed23..9d7bacc65047b 100755 --- a/FWCore/ParameterSet/python/TreeCrawler.py +++ b/FWCore/ParameterSet/python/TreeCrawler.py @@ -27,7 +27,6 @@ from builtins import range import sys, os, inspect, copy, struct, dis, imp import modulefinder -import six def packageNameFromFilename(name): return ".".join(name.replace("python/","").replace(".py","").split("/")[-3:]) @@ -269,13 +268,13 @@ def transformIntoGraph(depgraph,toplevel): packageDict[toplevel] = Package(toplevel, top = True) # create package objects - for key, value in six.iteritems(depgraph): + for key, value in depgraph.items(): if key.count(".") == 2 and key != toplevel: packageDict[key] = Package(key) for name in value.keys(): if name.count(".") == 2: packageDict[name] = Package(name) # now create dependencies - for key, value in six.iteritems(depgraph): + for key, value in depgraph.items(): if key.count(".") == 2 or key == toplevel: package = packageDict[key] package.dependencies = [packageDict[name] for name in value.keys() if name.count(".") == 2] diff --git a/FWCore/ParameterSet/python/Types.py b/FWCore/ParameterSet/python/Types.py index aac406dda2bce..b069226e74e9b 100644 --- a/FWCore/ParameterSet/python/Types.py +++ b/FWCore/ParameterSet/python/Types.py @@ -7,8 +7,7 @@ import codecs import copy import math -import six -from six.moves import builtins +import builtins _builtin_bool = bool @@ -879,7 +878,7 @@ def _isValid(value): return True def setValue(self,value): if isinstance(value,dict): - for k,v in six.iteritems(value): + for k,v in value.items(): setattr(self,k,v) def configValue(self, options=PrintOptions()): @@ -1241,7 +1240,7 @@ def makeCppPSet(module,cppPSetMaker): if not isinstance(module,dict): module = dict( ( (x,getattr(module,x)) for x in dir(module)) ) - for x,p in six.iteritems(module): + for x,p in module.items(): if isinstance(p,PSet): p.insertInto(cppPSetMaker,x) return cppPSetMaker @@ -1390,7 +1389,7 @@ def convertToPSet(name,module): def convertToVPSet( **kw ): returnValue = VPSet() - for name,module in six.iteritems(kw): + for name,module in kw.items(): returnValue.append(convertToPSet(name,module)) return returnValue diff --git a/FWCore/ParameterSet/python/Utilities.py b/FWCore/ParameterSet/python/Utilities.py index b5969c110a870..eac39114b0a61 100644 --- a/FWCore/ParameterSet/python/Utilities.py +++ b/FWCore/ParameterSet/python/Utilities.py @@ -61,10 +61,9 @@ def moduleLabelsInSequences(* sequences): def createTaskWithAllProducersAndFilters(process): from FWCore.ParameterSet.Config import Task - import six - l = [ p for p in six.itervalues(process.producers)] - l.extend( (f for f in six.itervalues(process.filters)) ) + l = [ p for p in process.producers.values()] + l.extend( (f for f in process.filters.values()) ) return Task(*l) def convertToSingleModuleEndPaths(process): @@ -72,10 +71,9 @@ def convertToSingleModuleEndPaths(process): and replace with new EndPaths each with only one module. 
""" import FWCore.ParameterSet.Config as cms - import six toRemove =[] added = [] - for n,ep in six.iteritems(process.endpaths_()): + for n,ep in process.endpaths_().items(): tsks = [] ep.visit(cms.TaskVisitor(tsks)) diff --git a/FWCore/ParameterSet/python/VarParsing.py b/FWCore/ParameterSet/python/VarParsing.py index ff3a75963b84b..0c8819d58ab18 100644 --- a/FWCore/ParameterSet/python/VarParsing.py +++ b/FWCore/ParameterSet/python/VarParsing.py @@ -5,7 +5,6 @@ from pprint import pprint from FWCore.Utilities.Enumerate import Enumerate from FWCore.Utilities.FileUtils import sectionNofTotal -import six class VarParsing (object): """Infrastructure to parse variable definitions passed to cmsRun @@ -561,13 +560,13 @@ def __str__ (self): retval = "" if len (self._singletons): retval = retval + "Singletons:\n" - for varName, value in sorted (six.iteritems(self._singletons)): + for varName, value in sorted (self._singletons.items()): retval = retval + form % (varName, value) + "\n"; if self._info.get(varName): retval = retval + formInfo % ('', self._info[varName]) + "\n" if len (self._singletons): retval = retval + "Lists:\n" - for varName, value in sorted (six.iteritems(self._lists)): + for varName, value in sorted (self._lists.items()): stringValue = "%s" % value if len (stringValue) < 76 - maxLen: retval = retval + form % (varName, value) + "\n" diff --git a/FWCore/ParameterSet/python/printPaths.py b/FWCore/ParameterSet/python/printPaths.py index 63b89a70bc037..4e843b6b5b76c 100644 --- a/FWCore/ParameterSet/python/printPaths.py +++ b/FWCore/ParameterSet/python/printPaths.py @@ -2,11 +2,10 @@ import FWCore.ParameterSet.SequenceTypes as sqt import FWCore.ParameterSet.Config as cms import FWCore.ParameterSet.Modules as mod -import six def printPaths(process): "print all the paths in the process" - for p in six.itervalues(process.paths): + for p in process.paths.values(): printPath(p) def printPath(pth, indent="", indentDelta=" ", type="path"): diff --git a/FWCore/ParameterSet/scripts/edmConfigSplit b/FWCore/ParameterSet/scripts/edmConfigSplit index 01495f6d6b9cb..281758d08245a 100755 --- a/FWCore/ParameterSet/scripts/edmConfigSplit +++ b/FWCore/ParameterSet/scripts/edmConfigSplit @@ -4,7 +4,6 @@ import sys import os import imp import argparse -import six from FWCore.ParameterSet.Mixins import PrintOptions @@ -54,7 +53,7 @@ options.useSubdirectories = args.subdirectories options.targetDirectory = args.output_directory files = process.splitPython(options) -for fn, c in six.iteritems(files): +for fn, c in files.items(): if fn == '-': continue d = os.path.dirname(fn) diff --git a/FWCore/ParameterSet/scripts/edmPythonConfigToCppValidation b/FWCore/ParameterSet/scripts/edmPythonConfigToCppValidation index 3e3e338196d53..f68b3a623b86d 100755 --- a/FWCore/ParameterSet/scripts/edmPythonConfigToCppValidation +++ b/FWCore/ParameterSet/scripts/edmPythonConfigToCppValidation @@ -5,7 +5,6 @@ from builtins import str from FWCore.ParameterSet.Modules import _TypedParameterizable from FWCore.ParameterSet.Mixins import _ValidatingParameterListBase import FWCore.ParameterSet.Config as cms -import six def simpleItemToString(param): return str(param) @@ -130,7 +129,7 @@ def expandRefToPSet(pset): def printParameterSetDescription(spacing,descName, pset, depth): pset = expandRefToPSet(pset) - for l,p in six.iteritems(pset.parameters_()): + for l,p in pset.parameters_().items(): if isinstance(p,cms.PSet): print(spacing+"{") newSpacing = spacing+" " @@ -195,7 +194,7 @@ def 
printListTypeParameter(spacing,psetName,pList,label,depth): def printParameterSet(spacing, psetName, pset, depth): pset = expandRefToPSet(pset) - for l,p in six.iteritems(pset.parameters_()): + for l,p in pset.parameters_().items(): if isinstance(p,cms.PSet): print(spacing+"{") newSpacing = spacing+" " @@ -251,7 +250,7 @@ for item in config: if not modules: raise RuntimeError("No module found in file '"+filename+"'") -modulesTypes = set(module.type_() for module in six.itervalues(modules)) +modulesTypes = set(module.type_() for module in modules.values()) if len(modulesTypes) > 1: raise RuntimeError("The file '"+filename+"' contains modules of different C++ types"); moduleType = modulesTypes.pop() @@ -266,7 +265,7 @@ if len(modules) > 1: newSpacing = spacing + ' '; else: newSpacing = spacing -for label, module in six.iteritems(modules): +for label, module in modules.items(): if len(modules) > 1: print(spacing+'{') print(newSpacing + '// ' + label) diff --git a/FWCore/ParameterSet/test/cmsconfig.py b/FWCore/ParameterSet/test/cmsconfig.py index d5332c8bdc1a2..c18db9c0deebf 100644 --- a/FWCore/ParameterSet/test/cmsconfig.py +++ b/FWCore/ParameterSet/test/cmsconfig.py @@ -25,7 +25,6 @@ import io import types -import six # TODO: Refactor pset_dict_to_string and class printable_parameter to # have a consistent view of the problem. Perhaps have a class @@ -39,7 +38,7 @@ def pset_dict_to_string(psetDict): stream = io.StringIO() stream.write('\n{\n') - for name, value in six.iteritems(psetDict): + for name, value in psetDict.items(): stream.write('%s' % printable_parameter(name, value)) stream.write('\n') @@ -51,7 +50,7 @@ def secsource_dict_to_string(secSourceDict): """Make a string representing the secsource""" stream = io.StringIO() stream.write("%s\n{\n" % secSourceDict["@classname"][2]) - for name, value in six.iteritems(secSourceDict): + for name, value in secSourceDict.items(): if name[0] != '@': stream.write('%s' % printable_parameter(name, value)) stream.write('\n') @@ -414,7 +413,7 @@ def __write_module_guts(self, moddict, fileobj): use any member data of the object, but I'm not sure we can rely on a new-enough version of Python to make use of static methods.""" - for name, value in six.iteritems(moddict): + for name, value in moddict.items(): if name[0] != '@': fileobj.write('%s' % printable_parameter(name, value)) fileobj.write('\n') diff --git a/FWCore/PythonUtilities/python/XML2Python.py b/FWCore/PythonUtilities/python/XML2Python.py index 17e3dbdf98cce..50263ce924956 100644 --- a/FWCore/PythonUtilities/python/XML2Python.py +++ b/FWCore/PythonUtilities/python/XML2Python.py @@ -7,7 +7,6 @@ import os import xml.sax.handler import pprint -import six class DataNode (object): @@ -136,7 +135,7 @@ def stringify (self, name = '', offset = 0): retval += '\n' + ' ' * offset retval += '%s: ' % name first = True - for key, value in sorted (six.iteritems(self._attrs)): + for key, value in sorted (self._attrs.items()): if first: retval += '{ \n' tempspace = offset + 3 diff --git a/FWCore/PythonUtilities/scripts/edmDumpEventContent b/FWCore/PythonUtilities/scripts/edmDumpEventContent index 3474ab998b509..e1c0f3897f496 100755 --- a/FWCore/PythonUtilities/scripts/edmDumpEventContent +++ b/FWCore/PythonUtilities/scripts/edmDumpEventContent @@ -9,7 +9,6 @@ import optparse import re import copy import subprocess -import six # define regex wrapperRE = re.compile (r'edm::Wrapper<(.+)\s*>$') @@ -116,7 +115,7 @@ class Branch (object): # that order here. 
order = ['type', 'module', 'label', 'process'] for obj in branchList: - for key, twoList in six.iteritems(lengthDict): + for key, twoList in lengthDict.items(): attribute = getattr (obj, key) if len (attribute) + 2 > twoList[0]: twoList[0] = len (attribute) + 2 diff --git a/FWCore/PythonUtilities/scripts/fjr2json.py b/FWCore/PythonUtilities/scripts/fjr2json.py index 2bb0cf9a77cf1..c8d0a40afa02c 100755 --- a/FWCore/PythonUtilities/scripts/fjr2json.py +++ b/FWCore/PythonUtilities/scripts/fjr2json.py @@ -9,7 +9,6 @@ import optparse import sys -import six if __name__ == '__main__': @@ -46,7 +45,7 @@ for runObject in runObjects: try: runs = ast.literal_eval(runObject) - for (run, lumis) in six.iteritems(runs): + for (run, lumis) in runs.items(): runList = runsLumisDict.setdefault (int(run), []) runList.extend(lumis) except ValueError: # Old style handled above diff --git a/FWCore/PythonUtilities/scripts/generateEDF.py b/FWCore/PythonUtilities/scripts/generateEDF.py index 5ce86535854ea..6091c826ffac5 100755 --- a/FWCore/PythonUtilities/scripts/generateEDF.py +++ b/FWCore/PythonUtilities/scripts/generateEDF.py @@ -13,7 +13,6 @@ import array import ROOT import math -import six sepRE = re.compile (r'[\s,;:]+') nonSpaceRE = re.compile (r'\S') @@ -176,7 +175,7 @@ def __init__ (self, filename, **kwargs): def __str__ (self): retval = 'run, lum del ( dt ) inst (#xng)\n' - for key, value in sorted (six.iteritems(self)): + for key, value in sorted (self.items()): retval += "%s\n" % value return retval @@ -200,7 +199,7 @@ def iteritems (self): def _integrateContainer (self): # calculate numbers for recorded integrated luminosity total = 0. - for key, lumi in six.iteritems(self): + for key, lumi in self.items(): total += lumi.recorded lumi.totalRecorded = total lumi.fracRecorded = old_div(total, self.totalRecLum) @@ -210,7 +209,7 @@ def _integrateContainer (self): return xingKeyList = [] maxAveInstLum = 0. - for key, lumi in six.iteritems(self): + for key, lumi in self.items(): if not lumi.xingInfo and not lumi.fixXingInfo(): if not self.noWarnings: print("Do not have lumi xing info for %s" % lumi.keyString) @@ -307,7 +306,7 @@ def makeEDFplot (lumiCont, eventsDict, totalWeight, outputFile, options): expectedVals = [] predVals = [] # loop over events - for key, eventList in sorted( six.iteritems(eventsDict) ): + for key, eventList in sorted( eventsDict.items() ): usePoints = True # should we add this point? if lumiCont.minRun and lumiCont.minRun > key[0] or \ @@ -410,7 +409,7 @@ def makeEDFplot (lumiCont, eventsDict, totalWeight, outputFile, options): eventTupList = [] if not lumiCont.xingInfo: raise RuntimeError("Luminosity Xing information missing.") - for key, eventList in sorted( six.iteritems(eventsDict) ): + for key, eventList in sorted( eventsDict.items() ): try: lumi = lumiCont[key] instLum = lumi.aveInstLum @@ -656,7 +655,7 @@ def makeEDFplot (lumiCont, eventsDict, totalWeight, outputFile, options): recLumValue = recLumis [recLumIndex] prevRecLumi = 0. 
done = False - for key, lumi in six.iteritems(cont): + for key, lumi in cont.items(): if prevRecLumi >= recLumValue and recLumValue < lumi.totalRecorded: # found it print("%s contains total recorded lumi %f" % \ diff --git a/FWCore/Services/bin/edmTracerLogToSimpleConfig.py b/FWCore/Services/bin/edmTracerLogToSimpleConfig.py index eb4e1faaa2b7b..012169732c240 100644 --- a/FWCore/Services/bin/edmTracerLogToSimpleConfig.py +++ b/FWCore/Services/bin/edmTracerLogToSimpleConfig.py @@ -14,7 +14,6 @@ #============================== import sys -import six f = open(sys.argv[1]) @@ -134,7 +133,7 @@ def finish(self): #needed to get rid of PathStatus modules at end of paths pathNamesAsModules = set( (fixName(n) for n in pathParser._pathToModules.iterkeys()) ) -for m,c in six.iteritems(consumesParser._consumesForModule): +for m,c in consumesParser._consumesForModule.items(): if m in pathNamesAsModules: continue if m in consumesParser._isAnalyzer: @@ -148,7 +147,7 @@ def finish(self): allModules.add(o) modulesWithConsumes.add(m) -for m in six.itervalues(pathParser._pathToModules): +for m in pathParser._pathToModules.values(): for i in m: allModules.add(i) @@ -157,7 +156,7 @@ def finish(self): print('t = cms.Task(*[%s])'%(",".join(["process.%s"%i for i in allModules if i not in consumesParser._isAnalyzer]))) -for p,m in six.iteritems(pathParser._pathToModules): +for p,m in pathParser._pathToModules.items(): if p in pathParser._isEndPath: print("process.%s = cms.EndPath(%s)"%(p,"+".join(["process.%s"%i for i in m]))) else: diff --git a/FWCore/Utilities/scripts/edmAddClassVersion b/FWCore/Utilities/scripts/edmAddClassVersion index ceee98bd08493..168e012f4f6d1 100755 --- a/FWCore/Utilities/scripts/edmAddClassVersion +++ b/FWCore/Utilities/scripts/edmAddClassVersion @@ -1,7 +1,6 @@ #! 
/usr/bin/env python3 import string, os from optparse import OptionParser -import six class ClassesDefXmlParser(object): """Parses a classes_def.xml file looking for class declarations that do not contain @@ -138,14 +137,14 @@ class GccXmlOutputParser(object): def getNameTree(self): nameTree = {} idToNode = {} - for id,atts in six.iteritems(self._contexts): + for id,atts in self._contexts.items(): childList = idToNode.setdefault(id,[isContext,{}]) if atts[1]: l=idToNode.setdefault(atts[1],[isContext,{}]) l[1][atts[0]]=childList else: nameTree[atts[0]]=childList - for id,atts in six.iteritems(self._typedefs): + for id,atts in self._typedefs.items(): typeID = self.resolvePossibleTypedef(atts[2]) if typeID not in idToNode: continue @@ -156,7 +155,7 @@ class GccXmlOutputParser(object): idToNode[atts[1]][1][atts[0]]=childList returnValue = {} #the actual bottom node doesn't have a name so we remove it - for i in six.itervalues(nameTree): + for i in nameTree.values(): returnValue.update(i[childrenIndex]) return returnValue @@ -283,11 +282,11 @@ if __name__ == '__main__': xmlfile = options.filedir+"/classes_def.xml" p = ClassesDefXmlParser(xmlfile) #print p.classes - classesToModify = [x[0] for x in six.iteritems(p.classes) if not checkIfTypedefOfTemplate([y for y in x[0].split(':') if y != ''],nameTree)] + classesToModify = [x[0] for x in p.classes.items() if not checkIfTypedefOfTemplate([y for y in x[0].split(':') if y != ''],nameTree)] classesWithChecksum = dict([(x,checksumForClass(x)) for x in classesToModify]) print 'Found the following non-templated classes which will be assigned a ClassVersion and the following checksum' - for name,checksum in six.iteritems(classesWithChecksum): + for name,checksum in classesWithChecksum.items(): print name,checksum #exit(0) diff --git a/FWCore/Utilities/scripts/edmCheckClassVersion b/FWCore/Utilities/scripts/edmCheckClassVersion index 983796a016695..897e4ea2e4ee8 100755 --- a/FWCore/Utilities/scripts/edmCheckClassVersion +++ b/FWCore/Utilities/scripts/edmCheckClassVersion @@ -1,7 +1,6 @@ #! 
/usr/bin/env python3 from __future__ import print_function from optparse import OptionParser -import six from sys import version_info if version_info[0] > 2: atol = int @@ -188,7 +187,7 @@ ROOT.gROOT.ProcessLine("checkclass checkTheClass;") p = XmlParser(options.xmlfile) foundErrors = dict() -for name,info in six.iteritems(p.classes): +for name,info in p.classes.items(): errorCode,rootClassVersion,classChecksum = checkClass(name,info[XmlParser.classVersionIndex],info[XmlParser.versionsToChecksumIndex]) if errorCode != noError: foundErrors[name]=(errorCode,classChecksum,rootClassVersion) @@ -197,7 +196,7 @@ for name,info in six.iteritems(p.classes): foundRootDoesNotMatchError = False originalToNormalizedNames = dict() -for name,retValues in six.iteritems(foundErrors): +for name,retValues in foundErrors.items(): origName = p.classes[name][XmlParser.originalNameIndex] originalToNormalizedNames[origName]=name code = retValues[0] diff --git a/FastSimulation/TrackingRecHitProducer/python/TrackingRecHitProducer_cfi.py b/FastSimulation/TrackingRecHitProducer/python/TrackingRecHitProducer_cfi.py index d1eb72a03a751..05e5c9363facb 100644 --- a/FastSimulation/TrackingRecHitProducer/python/TrackingRecHitProducer_cfi.py +++ b/FastSimulation/TrackingRecHitProducer/python/TrackingRecHitProducer_cfi.py @@ -1,5 +1,4 @@ # Python 2 vs 3 compatibility library: -import six import FWCore.ParameterSet.Config as cms @@ -61,8 +60,8 @@ } } -for subdetId,trackerLayers in six.iteritems(trackerStripGaussianResolutions): - for trackerLayer, resolutionX in six.iteritems(trackerLayers): +for subdetId,trackerLayers in trackerStripGaussianResolutions.items(): + for trackerLayer, resolutionX in trackerLayers.items(): pluginConfig = cms.PSet( name = cms.string(subdetId+str(trackerLayer)), type=cms.string("TrackingRecHitStripGSPlugin"), diff --git a/HLTrigger/Configuration/python/Tools/confdb.py b/HLTrigger/Configuration/python/Tools/confdb.py index 434a15b7da00a..aa8d22d5ed04f 100644 --- a/HLTrigger/Configuration/python/Tools/confdb.py +++ b/HLTrigger/Configuration/python/Tools/confdb.py @@ -8,7 +8,6 @@ from .pipe import pipe as _pipe from .options import globalTag from itertools import islice -import six def splitter(iterator, n): i = iterator.__iter__() @@ -69,7 +68,7 @@ def getSetupConfigurationFromDB(self): args = ['--configName', self.config.setup ] args.append('--noedsources') args.append('--nopaths') - for key, vals in six.iteritems(self.options): + for key, vals in self.options.items(): if vals: args.extend(('--'+key, ','.join(vals))) args.append('--cff') @@ -89,7 +88,7 @@ def getRawConfigurationFromDB(self): if not self.config.hilton: # keep the original Source when running on Hilton args.append('--noedsources') - for key, vals in six.iteritems(self.options): + for key, vals in self.options.items(): if vals: args.extend(('--'+key, ','.join(vals))) diff --git a/HLTrigger/Configuration/python/Tools/helper.py b/HLTrigger/Configuration/python/Tools/helper.py index 1e5b67111f0c5..5ab91ef90357a 100644 --- a/HLTrigger/Configuration/python/Tools/helper.py +++ b/HLTrigger/Configuration/python/Tools/helper.py @@ -8,27 +8,26 @@ """ import FWCore.ParameterSet.Config as cms -import six def findEDFilters(holder): if isinstance(holder, cms.Process): return process.filters_() else: - return dict( (name, module) for name, module in six.iteritems(holder) if isinstance(module, cms.EDFilter) ) + return dict( (name, module) for name, module in holder.items() if isinstance(module, cms.EDFilter) ) def findEDProducers(holder): if 
isinstance(holder, cms.Process): return process.producers_() else: - return dict( (name, module) for name, module in six.iteritems(holder) if isinstance(module, cms.EDProducer) ) + return dict( (name, module) for name, module in holder.items() if isinstance(module, cms.EDProducer) ) def findEDAnalyzers(holder): if isinstance(holder, cms.Process): return process.analyzers_() else: - return dict( (name, module) for name, module in six.iteritems(holder) if isinstance(module, cms.EDAnalyzer) ) + return dict( (name, module) for name, module in holder.items() if isinstance(module, cms.EDAnalyzer) ) def findModules(holder): @@ -39,6 +38,6 @@ def findModules(holder): modules.upate(process.filters_()) return modules else: - return dict( (name, module) for name, module in six.iteritems(holder) if isinstance(module, (cms.EDAnalyzer, _cms.EDProducer, _cms.EDFilter)) ) + return dict( (name, module) for name, module in holder.items() if isinstance(module, (cms.EDAnalyzer, _cms.EDProducer, _cms.EDFilter)) ) diff --git a/HLTrigger/Configuration/python/Utilities.py b/HLTrigger/Configuration/python/Utilities.py index 54af1f7e7d725..0c5ea5f094177 100644 --- a/HLTrigger/Configuration/python/Utilities.py +++ b/HLTrigger/Configuration/python/Utilities.py @@ -2,11 +2,10 @@ import HLTrigger.Configuration.Tools.options as _options import HLTrigger.Configuration.Tools.confdb as _confdb -import six def _build_options(**args): options = _options.HLTProcessOptions() - for key, val in six.iteritems(args): + for key, val in args.items(): setattr(options, key, val) return options diff --git a/HLTrigger/Configuration/python/common.py b/HLTrigger/Configuration/python/common.py index cfc8916fa76ca..da893a6c4e876 100644 --- a/HLTrigger/Configuration/python/common.py +++ b/HLTrigger/Configuration/python/common.py @@ -1,14 +1,13 @@ import itertools -import six import FWCore.ParameterSet.Config as cms def producers_by_type(process, *types): "Find all EDProducers in the Process that are instances of the given C++ type." switches = (module for module in (getattr(switchproducer, case) \ - for switchproducer in six.itervalues(process._Process__switchproducers) \ + for switchproducer in process._Process__switchproducers.values() \ for case in switchproducer.parameterNames_()) \ if isinstance(module, cms.EDProducer)) - return (module for module in itertools.chain(six.itervalues(process._Process__producers), switches) \ + return (module for module in itertools.chain(process._Process__producers.values(), switches) \ if module._TypedParameterizable__type in types) def filters_by_type(process, *types): @@ -26,18 +25,18 @@ def esproducers_by_type(process, *types): def modules_by_type(process, *types): "Find all modiles or other components in the Process that are instances of the given C++ type." switches = (module for module in (getattr(switchproducer, case) \ - for switchproducer in six.itervalues(process._Process__switchproducers) \ + for switchproducer in process._Process__switchproducers.values() \ for case in switchproducer.parameterNames_())) - return (module for module in itertools.chain(six.itervalues(process.__dict__), switches) \ + return (module for module in itertools.chain(process.__dict__.values(), switches) \ if hasattr(module, '_TypedParameterizable__type') and module._TypedParameterizable__type in types) def insert_modules_before(process, target, *modules): "Add the `modules` before the `target` in any Sequence, Paths or EndPath that contains the latter." 
for sequence in itertools.chain( - six.itervalues(process._Process__sequences), - six.itervalues(process._Process__paths), - six.itervalues(process._Process__endpaths) + process._Process__sequences.values(), + process._Process__paths.values(), + process._Process__endpaths.values() ): try: position = sequence.index(target) @@ -51,9 +50,9 @@ def insert_modules_before(process, target, *modules): def insert_modules_after(process, target, *modules): "Add the `modules` after the `target` in any Sequence, Paths or EndPath that contains the latter." for sequence in itertools.chain( - six.itervalues(process._Process__sequences), - six.itervalues(process._Process__paths), - six.itervalues(process._Process__endpaths) + process._Process__sequences.values(), + process._Process__paths.values(), + process._Process__endpaths.values() ): try: position = sequence.index(target) diff --git a/HLTrigger/Configuration/test/add/pythonToPythonTranslator.py b/HLTrigger/Configuration/test/add/pythonToPythonTranslator.py index 9e4b8e6da4810..703fc52db0b4a 100644 --- a/HLTrigger/Configuration/test/add/pythonToPythonTranslator.py +++ b/HLTrigger/Configuration/test/add/pythonToPythonTranslator.py @@ -7,7 +7,6 @@ # enable tracing cms.Sequences, cms.Paths and cms.EndPaths for all imported modules (thus, process.load(...), too) import tracingImport -import six result = dict() result['procname'] = '' @@ -72,7 +71,7 @@ def prepareParameter(parameter): return (type(parameter).__name__, trackedness(parameter), configValue ) if isinstance(parameter, cms.PSet): configValue = {} - for name, item in six.iteritems(parameter.parameters_()): + for name, item in parameter.parameters_().items(): configValue[name] = prepareParameter(item) return (type(parameter).__name__, trackedness(parameter), configValue ) else: @@ -81,7 +80,7 @@ def prepareParameter(parameter): def parsePSet(module): if module is None: return config = DictTypes.SortedKeysDict() - for parameterName,parameter in six.iteritems(module.parameters_()): + for parameterName,parameter in module.parameters_().items(): config[parameterName] = prepareParameter(parameter) return config @@ -89,7 +88,7 @@ def parseSource(module): if module is None: return config = DictTypes.SortedKeysDict() config['@classname'] = ('string','tracked',module.type_()) - for parameterName,parameter in six.iteritems(module.parameters_()): + for parameterName,parameter in module.parameters_().items(): config[parameterName] = prepareParameter(parameter) return config @@ -98,7 +97,7 @@ def parseModule(name, module): config = DictTypes.SortedKeysDict() config['@classname'] = ('string','tracked',module.type_()) config['@label'] = ('string','tracked',name) - for parameterName,parameter in six.iteritems(module.parameters_()): + for parameterName,parameter in module.parameters_().items(): config[parameterName] = prepareParameter(parameter) return config @@ -107,41 +106,41 @@ def parseModules(process): result['main_input'] = parseSource(process.source) - for name,item in six.iteritems(process.producers): + for name,item in process.producers.items(): result['modules'][name] = parseModule(name, item) - for name,item in six.iteritems(process.filters): + for name,item in process.filters.items(): result['modules'][name] = parseModule(name, item) - for name,item in six.iteritems(process.analyzers): + for name,item in process.analyzers.items(): result['modules'][name] = parseModule(name, item) - for name,item in six.iteritems(process.outputModules): + for name,item in process.outputModules.items(): 
result['modules'][name] = parseModule(name, item) result['output_modules'].append(name) - for name,item in six.iteritems(process.es_sources): + for name,item in process.es_sources.items(): result['es_sources'][name + '@'] = parseModule(name, item) - for name,item in six.iteritems(process.es_producers): + for name,item in process.es_producers.items(): result['es_modules'][name + '@'] = parseModule(name, item) - for name,item in six.iteritems(process.es_prefers): + for name,item in process.es_prefers.items(): result['es_prefers'][name + '@'] = parseModule(name, item) - for name,item in six.iteritems(process.psets): + for name,item in process.psets.items(): result['psets'][name] = parsePSet(item) - for name,item in six.iteritems(process.sequences): + for name,item in process.sequences.items(): result['sequences'][name] = "'" + item.dumpConfig("")[1:-2] + "'" - for name,item in six.iteritems(process.paths): + for name,item in process.paths.items(): result['paths'][name] = "'" + item.dumpConfig("")[1:-2] + "'" - for name,item in six.iteritems(process.endpaths): + for name,item in process.endpaths.items(): result['endpaths'][name] = "'" + item.dumpConfig("")[1:-2] + "'" - for name,item in six.iteritems(process.services): + for name,item in process.services.items(): result['services'][name] = parseModule(name, item) # TODO still missing: @@ -207,7 +206,7 @@ def parseModules(process): else: print ", '%s': {" % key comma = '' - for name,object in six.iteritems(result[key]): + for name,object in result[key].items(): print comma+"'%s': %s" %(name, dumpObject(object,key)) comma = ', ' print '} # end of %s' % key diff --git a/HLTrigger/Configuration/test/add/tracingImport.py b/HLTrigger/Configuration/test/add/tracingImport.py index 99f43365a53d2..9930d67abd24c 100644 --- a/HLTrigger/Configuration/test/add/tracingImport.py +++ b/HLTrigger/Configuration/test/add/tracingImport.py @@ -9,7 +9,7 @@ """ import sys, imp, -from six import builtins +import builtins import re # patterns to discover cms.Path and cms.EndPath definitions in imported files diff --git a/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_WithLumi_cfg.py b/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_WithLumi_cfg.py index ab9ad848b5556..cf86e4c11fddb 100644 --- a/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_WithLumi_cfg.py +++ b/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_WithLumi_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six ################################################################## @@ -101,9 +100,9 @@ #nc=0 if (isData): # replace all instances of "rawDataCollector" with "source" in InputTags from FWCore.ParameterSet import Mixins - for module in six.itervalues(process.__dict__): + for module in process.__dict__.values(): if isinstance(module, Mixins._Parameterizable): - for parameter in six.itervalues(module.__dict__): + for parameter in module.__dict__.values(): if isinstance(parameter, cms.InputTag): if parameter.moduleLabel == 'rawDataCollector': parameter.moduleLabel = 'source' diff --git a/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_cfg.py b/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_cfg.py index 05d9376483c7a..4709000f6fcd0 100644 --- a/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_cfg.py +++ b/HLTrigger/HLTanalyzers/test/HLTBitAnalysis_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six ################################################################## @@ -88,9 +87,9 @@ #nc=0 if (isData): # replace all instances of "rawDataCollector" with "source" in InputTags from 
FWCore.ParameterSet import Mixins - for module in six.itervalues(process.__dict__): + for module in process.__dict__.values(): if isinstance(module, Mixins._Parameterizable): - for parameter in six.itervalues(module.__dict__): + for parameter in module.__dict__.values(): if isinstance(parameter, cms.InputTag): if parameter.moduleLabel == 'rawDataCollector': parameter.moduleLabel = 'source' diff --git a/HLTrigger/Tools/python/PDRates.py b/HLTrigger/Tools/python/PDRates.py index 28a7a404097d7..e9193cad000cc 100755 --- a/HLTrigger/Tools/python/PDRates.py +++ b/HLTrigger/Tools/python/PDRates.py @@ -7,7 +7,6 @@ from optparse import OptionParser -import six # -- # -- Usage : # -- Rate within a given PD : @@ -96,7 +95,7 @@ def RateInPD(Run,PrimaryDataset,lsMin,lsMax,printLS=False): lsmin=9999999 lsmax=-1 - for (LS,file) in six.iteritems(LSinFile): + for (LS,file) in LSinFile.items(): nls = NumberOfLSInFile[file] RatePerLS[LS] = RatePerLS[LS] / nls RatePerLS[LS] = RatePerLS[LS] / LS_Length @@ -141,7 +140,7 @@ def RateInPD(Run,PrimaryDataset,lsMin,lsMax,printLS=False): RateInPD(Run,PrimaryDataset,lsMin, lsMax, True) RatesTmp = open("rates_tmp.txt","w") #RatesTmpSort = open("rates_tmp_sort.txt","w") - for (LS, rate) in six.iteritems(RatePerLS): + for (LS, rate) in RatePerLS.items(): RatesTmp.write(LS+"\t"+repr(rate)+"\n") #if int(LS) >= lsMin and int(LS) <= lsMax: #nLS_within_range =nLS_within_range +1 diff --git a/HLTrigger/Tools/scripts/timingPdfMaker.py b/HLTrigger/Tools/scripts/timingPdfMaker.py index 328f7ab5d598e..39c4aa9a58187 100755 --- a/HLTrigger/Tools/scripts/timingPdfMaker.py +++ b/HLTrigger/Tools/scripts/timingPdfMaker.py @@ -22,7 +22,6 @@ import os.path import operator import subprocess -import six import sys, getopt sys.argv.append('-b') from ROOT import * @@ -88,9 +87,9 @@ def maininfo(infile, outfile): if not pathname in names4: names4[pathname] = k.ReadObj().GetMean() - names2 = dict(sorted(six.iteritems(names1), key=operator.itemgetter(1),reverse=True)[:10]) + names2 = dict(sorted(names1.items(), key=operator.itemgetter(1),reverse=True)[:10]) names3 = sorted(names2, key=names2.get, reverse=True) - names5 = dict(sorted(six.iteritems(names4), key=operator.itemgetter(1),reverse=True)[:10]) + names5 = dict(sorted(names4.items(), key=operator.itemgetter(1),reverse=True)[:10]) names6 = sorted(names5, key=names5.get, reverse=True) texfile = open(outfile+'-main.tex', 'w') diff --git a/HLTriggerOffline/Egamma/python/EgammaValidation_cff.py b/HLTriggerOffline/Egamma/python/EgammaValidation_cff.py index feaa8287507a3..860fab05e0351 100644 --- a/HLTriggerOffline/Egamma/python/EgammaValidation_cff.py +++ b/HLTriggerOffline/Egamma/python/EgammaValidation_cff.py @@ -1,6 +1,5 @@ import FWCore.ParameterSet.Config as cms from functools import reduce -import six # whether to use the old or newer (automatically adapting # to the MC menu) method of configuring the monitoring @@ -204,7 +203,7 @@ class dummy: #---------------------------------------- egammaSelectors = [] - for hltPathCategory, thisCategoryData in six.iteritems(configData): + for hltPathCategory, thisCategoryData in configData.items(): # all paths in the current category share the same # generator level requirement # @@ -253,7 +252,7 @@ class dummy: pathsByCategory = EgammaHLTValidationUtils.findEgammaPaths(refProcess) - for hltPathCategory, thisCategoryData in six.iteritems(configData): + for hltPathCategory, thisCategoryData in configData.items(): # get the HLT path objects for this category paths = pathsByCategory[hltPathCategory] 
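All of the hunks in this patch apply the same mechanical Python 3 substitution for the removed six helpers. A minimal standalone sketch of that pattern, using a throwaway dictionary rather than any of the patched modules (the names below are illustrative only, not taken from the patch):

    # six.iteritems(d)  -> d.items()
    # six.itervalues(d) -> d.values()
    # six.iterkeys(d)   -> d.keys()
    # from six.moves import builtins -> import builtins
    import builtins

    modules = {"producerA": 1, "filterB": 2}          # illustrative stand-in for a process dict

    for name, module in sorted(modules.items()):      # was: sorted(six.iteritems(modules))
        print(name, module)

    labels = ",".join(modules.keys())                 # was: ",".join(six.iterkeys(modules))
    values = [m for m in modules.values()]            # was: six.itervalues(modules)

In Python 3 the dict views are lazy, like the six iterators they replace, so the substitution is behaviour-preserving in these loops; only call sites that need an actual list (for example pths in Config.prune above) wrap the view in list().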
diff --git a/HLTriggerOffline/Egamma/test/makePerPathConfigFiles.py b/HLTriggerOffline/Egamma/test/makePerPathConfigFiles.py index fcc6c71ad1c18..cf313691bf730 100755 --- a/HLTriggerOffline/Egamma/test/makePerPathConfigFiles.py +++ b/HLTriggerOffline/Egamma/test/makePerPathConfigFiles.py @@ -12,7 +12,6 @@ import FWCore.ParameterSet.Config as cms import HLTriggerOffline.Egamma.EgammaHLTValidationUtils as EgammaHLTValidationUtils import sys, os -import six # prefix for printouts # msgPrefix = "[" + os.path.basename(__file__) + "]" @@ -143,7 +142,7 @@ def makeOnePath(path, isFastSim): -for hltPathCategory, thisCategoryData in six.iteritems(configData): +for hltPathCategory, thisCategoryData in configData.items(): # get the HLT path objects for this category paths = pathsByCategory[hltPathCategory] diff --git a/HeterogeneousCore/SonicTriton/test/tritonTest_cfg.py b/HeterogeneousCore/SonicTriton/test/tritonTest_cfg.py index 73ee4ddeb4e8a..d9ba2be799cc5 100644 --- a/HeterogeneousCore/SonicTriton/test/tritonTest_cfg.py +++ b/HeterogeneousCore/SonicTriton/test/tritonTest_cfg.py @@ -1,6 +1,6 @@ from FWCore.ParameterSet.VarParsing import VarParsing import FWCore.ParameterSet.Config as cms -import os, sys, json, six +import os, sys, json # module/model correspondence models = { @@ -108,9 +108,10 @@ } keepMsgs = ['TritonClient','TritonService'] + for im,module in enumerate(options.modules): model = options.models[im] - Module = [obj for name,obj in six.iteritems(modules) if name in module][0] + Module = [obj for name,obj in modules.items() if name in module][0] setattr(process, module, Module(module, Client = cms.PSet( diff --git a/L1Trigger/Configuration/python/L1Trigger_custom.py b/L1Trigger/Configuration/python/L1Trigger_custom.py index a9f664f21441a..586fac6db66c3 100644 --- a/L1Trigger/Configuration/python/L1Trigger_custom.py +++ b/L1Trigger/Configuration/python/L1Trigger_custom.py @@ -4,7 +4,6 @@ # V.M. 
Ghete 2010-06-09 initial version import FWCore.ParameterSet.Config as cms -import six def customiseUnprescaleAlgoTriggers(process): @@ -164,7 +163,7 @@ def customiseL1EmulatorFromRaw(process): process.CaloTriggerPrimitives + process.SimL1Emulator ) - for path in six.itervalues(process._Process__paths): + for path in process._Process__paths.values(): path.replace(process.SimL1Emulator, process.CaloTPG_SimL1Emulator) # set the new input tags after RawToDigi @@ -237,11 +236,11 @@ def customiseL1GtEmulatorFromRaw(process): process.simGtDigis ) # replace the SimL1Emulator in all paths and sequences - for iterable in six.itervalues(process.sequences): + for iterable in process.sequences.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.paths): + for iterable in process.paths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.endpaths): + for iterable in process.endpaths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) process.SimL1Emulator = SimL1Emulator @@ -276,11 +275,11 @@ def customiseL1CaloAndGtEmulatorsFromRaw(process): process.simGtDigis ) # replace the SimL1Emulator in all paths and sequences - for iterable in six.itervalues(process.sequences): + for iterable in process.sequences.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.paths): + for iterable in process.paths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.endpaths): + for iterable in process.endpaths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) process.SimL1Emulator = SimL1Emulator diff --git a/L1Trigger/L1TCalorimeter/python/L1TCaloStage1_customForHLT.py b/L1Trigger/L1TCalorimeter/python/L1TCaloStage1_customForHLT.py index 2789955b159bb..1b09fe538eb51 100644 --- a/L1Trigger/L1TCalorimeter/python/L1TCaloStage1_customForHLT.py +++ b/L1Trigger/L1TCalorimeter/python/L1TCaloStage1_customForHLT.py @@ -3,7 +3,6 @@ import FWCore.ParameterSet.Config as cms import os -import six ############################################################################## @@ -121,11 +120,11 @@ def customiseL1EmulatorFromRaw(process): process.simGtDigis ) # replace the SimL1Emulator in all paths and sequences - for iterable in six.itervalues(process.sequences): + for iterable in process.sequences.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.paths): + for iterable in process.paths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) - for iterable in six.itervalues(process.endpaths): + for iterable in process.endpaths.values(): iterable.replace( process.SimL1Emulator, SimL1Emulator) process.SimL1Emulator = SimL1Emulator diff --git a/L1Trigger/L1TCalorimeter/python/convertParamsToOnlineFormat.py b/L1Trigger/L1TCalorimeter/python/convertParamsToOnlineFormat.py index 38ac16b5d13a6..0657288f38791 100644 --- a/L1Trigger/L1TCalorimeter/python/convertParamsToOnlineFormat.py +++ b/L1Trigger/L1TCalorimeter/python/convertParamsToOnlineFormat.py @@ -8,7 +8,6 @@ import sys import xml.etree.ElementTree as ET -import six # Pairwise generator: returns pairs of adjacent elements in a list / other iterable def pairwiseGen(aList): @@ -253,8 +252,8 @@ def createXML(parameters, contextId, outputFilePath): os.mkdir(args.output_dir) if args.mif: - for fileName, value in six.iteritems(getMifParameterMap(caloParams)): + for fileName, value 
in getMifParameterMap(caloParams).items(): createMIF(args.output_dir + '/' + fileName, value) else: - for fileTag, paramList in six.iteritems(getXmlParameterMap(caloParams)): + for fileTag, paramList in getXmlParameterMap(caloParams).items(): createXML(paramList, 'MainProcessor' if fileTag.startswith('mp') else 'Demux', args.output_dir + '/algo_' + fileTag + '.xml') diff --git a/L1Trigger/L1TMuonEndCap/test/unittests/FWLiteAnalyzer.py b/L1Trigger/L1TMuonEndCap/test/unittests/FWLiteAnalyzer.py index ae7682d7f5728..a13144589409c 100644 --- a/L1Trigger/L1TMuonEndCap/test/unittests/FWLiteAnalyzer.py +++ b/L1Trigger/L1TMuonEndCap/test/unittests/FWLiteAnalyzer.py @@ -8,7 +8,6 @@ from ROOT import gROOT, gSystem, AutoLibraryLoader from DataFormats.FWLite import Events, Handle -import six class FWLiteAnalyzer(object): @@ -28,7 +27,7 @@ def __init__(self, inputFiles=None, handles=None, firstEvent=None, maxEvents=Non self.handles = {} self.handle_labels = {} if handles: - for k, v in six.iteritems(handles): + for k, v in handles.items(): self.handles[k] = Handle(v[0]) self.handle_labels[k] = v[1] @@ -71,7 +70,7 @@ def process(self, event): return def getHandles(self, event): - for k, v in six.iteritems(self.handles): + for k, v in self.handles.items(): label = self.handle_labels[k] event.getByLabel(label, v) return diff --git a/L1TriggerConfig/Utilities/test/bmtfDump.py b/L1TriggerConfig/Utilities/test/bmtfDump.py index f9922e14c8663..ce94a7d1c09a7 100644 --- a/L1TriggerConfig/Utilities/test/bmtfDump.py +++ b/L1TriggerConfig/Utilities/test/bmtfDump.py @@ -2,7 +2,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -79,7 +78,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/L1TriggerConfig/Utilities/test/caloL1Dump.py b/L1TriggerConfig/Utilities/test/caloL1Dump.py index 8da9fe1efcc08..250aa97820e75 100644 --- a/L1TriggerConfig/Utilities/test/caloL1Dump.py +++ b/L1TriggerConfig/Utilities/test/caloL1Dump.py @@ -3,7 +3,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -70,7 +69,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/L1TriggerConfig/Utilities/test/caloL2Dump.py b/L1TriggerConfig/Utilities/test/caloL2Dump.py index 997752221ae3c..c977c11f6a489 100644 --- a/L1TriggerConfig/Utilities/test/caloL2Dump.py +++ b/L1TriggerConfig/Utilities/test/caloL2Dump.py @@ -3,7 +3,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -82,7 +81,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) query = queryAlgoKeys.format(config,queryKeys) diff --git 
a/L1TriggerConfig/Utilities/test/emtfDump.py b/L1TriggerConfig/Utilities/test/emtfDump.py index c669f14a893a9..33fc66d942717 100644 --- a/L1TriggerConfig/Utilities/test/emtfDump.py +++ b/L1TriggerConfig/Utilities/test/emtfDump.py @@ -2,7 +2,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -80,7 +79,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/L1TriggerConfig/Utilities/test/omtfDump.py b/L1TriggerConfig/Utilities/test/omtfDump.py index 5b4cb8cef9ec4..3662e21dc70e8 100644 --- a/L1TriggerConfig/Utilities/test/omtfDump.py +++ b/L1TriggerConfig/Utilities/test/omtfDump.py @@ -2,7 +2,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -70,7 +69,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/L1TriggerConfig/Utilities/test/ugmtDump.py b/L1TriggerConfig/Utilities/test/ugmtDump.py index 7b15d336eb31d..a7e91c32de006 100644 --- a/L1TriggerConfig/Utilities/test/ugmtDump.py +++ b/L1TriggerConfig/Utilities/test/ugmtDump.py @@ -2,7 +2,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -87,7 +86,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/L1TriggerConfig/Utilities/test/ugtDump.py b/L1TriggerConfig/Utilities/test/ugtDump.py index 1b6d865e587e2..d126967e7c151 100644 --- a/L1TriggerConfig/Utilities/test/ugtDump.py +++ b/L1TriggerConfig/Utilities/test/ugtDump.py @@ -2,7 +2,6 @@ import re import os, sys, shutil import subprocess -import six """ A simple helper script that provided with no arguments dumps a list of top-level keys, and provided with any key from this list as an argument, @@ -103,7 +102,7 @@ } # do the main job here -for config,fileName in six.iteritems(batch): +for config,fileName in batch.items(): sqlplus = subprocess.Popen(sqlplusCmd, shell=False, stdout=subprocess.PIPE, stdin=subprocess.PIPE) with open(fileName,'w') as f: diff --git a/PhysicsTools/Heppy/python/analyzers/core/Analyzer.py b/PhysicsTools/Heppy/python/analyzers/core/Analyzer.py index fa0145506d6b2..328d884a6270a 100644 --- a/PhysicsTools/Heppy/python/analyzers/core/Analyzer.py +++ b/PhysicsTools/Heppy/python/analyzers/core/Analyzer.py @@ -3,7 +3,6 @@ import logging from PhysicsTools.HeppyCore.framework.analyzer import Analyzer as CoreAnalyzer -import six class Analyzer(CoreAnalyzer): '''Base Analyzer class. 
Used in Looper.''' @@ -31,8 +30,8 @@ def readCollections(self, iEvent ): # if not self.beginLoopCalled: # # necessary in case the user calls process to go straight to a given event, before looping # self.beginLoop(setup) - for str,handle in six.iteritems(self.handles): + for str,handle in self.handles.items(): handle.Load( iEvent ) if self.cfg_comp.isMC: - for str,handle in six.iteritems(self.mchandles): + for str,handle in self.mchandles.items(): handle.Load( iEvent ) diff --git a/PhysicsTools/Heppy/python/analyzers/core/AutoFillTreeProducer.py b/PhysicsTools/Heppy/python/analyzers/core/AutoFillTreeProducer.py index b93b339ea845f..ce6a1ded7ae31 100644 --- a/PhysicsTools/Heppy/python/analyzers/core/AutoFillTreeProducer.py +++ b/PhysicsTools/Heppy/python/analyzers/core/AutoFillTreeProducer.py @@ -5,7 +5,6 @@ from PhysicsTools.Heppy.analyzers.core.autovars import * from PhysicsTools.Heppy.analyzers.objects.autophobj import * -import six class AutoFillTreeProducer( TreeAnalyzerNumpy ): @@ -40,7 +39,7 @@ def declareHandles(self): super(AutoFillTreeProducer, self).declareHandles() # self.handles['TriggerResults'] = AutoHandle( ('TriggerResults','','HLT'), 'edm::TriggerResults' ) self.mchandles['GenInfo'] = AutoHandle( ('generator','',''), 'GenEventInfoProduct' ) - for k,v in six.iteritems(self.collections): + for k,v in self.collections.items(): if isinstance(v, tuple) and isinstance(v[0], AutoHandle): self.handles[k] = v[0] @@ -53,7 +52,7 @@ def declareCoreVariables(self, tr, isMC): # self.triggerBitCheckers = [] # if hasattr(self.cfg_ana, 'triggerBits'): - # for T, TL in six.iteritems(self.cfg_ana.triggerBits): + # for T, TL in self.cfg_ana.triggerBits.items(): # trigVec = ROOT.vector(ROOT.string)() # for TP in TL: # trigVec.push_back(TP) @@ -98,9 +97,9 @@ def declareVariables(self,setup): for v in self.globalVariables: v.makeBranch(tree, isMC) - for o in six.itervalues(self.globalObjects): + for o in self.globalObjects.values(): o.makeBranches(tree, isMC) - for c in six.itervalues(self.collections): + for c in self.collections.values(): if isinstance(c, tuple): c = c[-1] if self.scalar: c.makeBranchesScalar(tree, isMC) @@ -161,11 +160,11 @@ def fillTree(self, event, resetFirst=True): if not isMC and v.mcOnly: continue v.fillBranch(self.tree, event, isMC) - for on, o in six.iteritems(self.globalObjects): + for on, o in self.globalObjects.items(): if not isMC and o.mcOnly: continue o.fillBranches(self.tree, getattr(event, on), isMC) - for cn, c in six.iteritems(self.collections): + for cn, c in self.collections.items(): if isinstance(c, tuple) and isinstance(c[0], AutoHandle): if not isMC and c[-1].mcOnly: continue objects = self.handles[cn].product() diff --git a/PhysicsTools/Heppy/python/analyzers/core/TriggerBitAnalyzer.py b/PhysicsTools/Heppy/python/analyzers/core/TriggerBitAnalyzer.py index f12b77f4aebca..5cbc62e741c10 100644 --- a/PhysicsTools/Heppy/python/analyzers/core/TriggerBitAnalyzer.py +++ b/PhysicsTools/Heppy/python/analyzers/core/TriggerBitAnalyzer.py @@ -4,7 +4,6 @@ from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.analyzers.core.AutoFillTreeProducer import NTupleVariable -import six import PhysicsTools.HeppyCore.framework.config as cfg class TriggerBitAnalyzer( Analyzer ): @@ -43,7 +42,7 @@ def beginLoop(self, setup): self.allPaths = set() self.triggerBitCheckersSingleBits = [] - for T, TL in six.iteritems(self.cfg_ana.triggerBits): + for T, TL in 
self.cfg_ana.triggerBits.items(): trigVec = ROOT.vector(ROOT.string)() for TP in TL: trigVec.push_back(TP) diff --git a/PhysicsTools/Heppy/python/analyzers/examples/ntuple.py b/PhysicsTools/Heppy/python/analyzers/examples/ntuple.py index a4214586aeee7..9b398d8717702 100644 --- a/PhysicsTools/Heppy/python/analyzers/examples/ntuple.py +++ b/PhysicsTools/Heppy/python/analyzers/examples/ntuple.py @@ -1,6 +1,5 @@ #!/bin/env python from __future__ import print_function -import six def var( tree, varName, type=float ): tree.var(varName, type) @@ -77,7 +76,7 @@ def fillTau(tree, pName, tau): fillParticle(tree, pName, tau) fill(tree, '{pName}_nsigcharged'.format(pName=pName), len(tau.signalCharged())) fill(tree, '{pName}_isolation'.format(pName=pName), tau.isolation()) - for discName, value in six.iteritems(tau.discs): + for discName, value in tau.discs.items(): fill(tree, '{pName}_{disc}'.format(pName=pName, disc=discName), value) diff --git a/PhysicsTools/Heppy/python/analyzers/objects/JetAnalyzer.py b/PhysicsTools/Heppy/python/analyzers/objects/JetAnalyzer.py index 8ded7decb79b1..ca8c82ec25554 100644 --- a/PhysicsTools/Heppy/python/analyzers/objects/JetAnalyzer.py +++ b/PhysicsTools/Heppy/python/analyzers/objects/JetAnalyzer.py @@ -10,7 +10,6 @@ from PhysicsTools.Heppy.physicsutils.QGLikelihoodCalculator import QGLikelihoodCalculator -import six import copy def cleanNearestJetOnly(jets,leptons,deltaR): dr2 = deltaR**2 @@ -332,7 +331,7 @@ def process(self, event): jetNus=[x for x in event.genParticles if abs(x.pdgId()) in [12,14,16] and self.cfg_ana.genNuSelection(x) ] pairs= matchObjectCollection (jetNus, self.genJets, 0.4**2) - for (nu,genJet) in six.iteritems(pairs) : + for (nu,genJet) in pairs.items() : if genJet is not None : if not hasattr(genJet,"nu") : genJet.nu=nu.p4() diff --git a/PhysicsTools/Heppy/python/physicsutils/TauDecayModes.py b/PhysicsTools/Heppy/python/physicsutils/TauDecayModes.py index f6241acd35229..a443957c4b4f6 100644 --- a/PhysicsTools/Heppy/python/physicsutils/TauDecayModes.py +++ b/PhysicsTools/Heppy/python/physicsutils/TauDecayModes.py @@ -8,7 +8,6 @@ def __init__(self): def _decayModes(self): '''Builds the internal dictionaries from the enum defined in http://cmslxr.fnal.gov/lxr/source/DataFormats/TauReco/interface/PFTau.h''' - import six tmp = [ 'kNull', 'kOneProng0PiZero', @@ -30,7 +29,7 @@ def _decayModes(self): ] self.decayModes = dict( (index-1, name) for index, name in enumerate( tmp ) ) self.decayModeNames = dict( (value, key) for key, value \ - in six.iteritems(self.decayModes) ) + in self.decayModes.items() ) def intToName( self, anInt ): '''Returns the decay mode name corresponding to an int.''' diff --git a/PhysicsTools/Heppy/python/utils/rltinfo.py b/PhysicsTools/Heppy/python/utils/rltinfo.py index 8472a503710ed..16c0da8719806 100644 --- a/PhysicsTools/Heppy/python/utils/rltinfo.py +++ b/PhysicsTools/Heppy/python/utils/rltinfo.py @@ -1,7 +1,6 @@ from __future__ import print_function from ROOT import TFile from PhysicsTools.HeppyCore.statistics.tree import Tree as Tree -import six class MyInteger(object): def __init__(self, integer ): @@ -26,7 +25,7 @@ def add(self, trigger, run, lumi): def __str__(self): lines = [] - for rlt, count in six.iteritems(self.dict): + for rlt, count in self.dict.items(): lines.append( ': '.join( [str(rlt), str(count)] )) return '\n'.join(lines) @@ -37,7 +36,7 @@ def write(self, dirName, fileName='RLTInfo.root'): t.var('lumi', int ) t.var('counts', int ) t.var('trigger', int ) - for rlt, count in six.iteritems(self.dict): + for 
rlt, count in self.dict.items(): t.fill('run', rlt[1]) t.fill('lumi', rlt[2]) t.fill( 'counts', count.integer) @@ -54,7 +53,7 @@ def write(self, dirName, fileName='RLTInfo.root'): rltinfo.add('HLT1', 129, 2) rltinfo.add('HLT2', 129, 2) - for rlt, count in six.iteritems(rltinfo.dict): + for rlt, count in rltinfo.dict.items(): print(rlt, count) rltinfo.write('.') diff --git a/PhysicsTools/Heppy/scripts/heppy_report.py b/PhysicsTools/Heppy/scripts/heppy_report.py index b4e964d526c5c..08e4db6c0eafe 100755 --- a/PhysicsTools/Heppy/scripts/heppy_report.py +++ b/PhysicsTools/Heppy/scripts/heppy_report.py @@ -3,7 +3,6 @@ from builtins import range from optparse import OptionParser import json -import six def root2map(dir,ana,treename): import ROOT @@ -28,9 +27,9 @@ def root2map(dir,ana,treename): jsonind[run] = list(set(jsonind[run])) nruns = len(jsonind) - nlumis = sum(len(v) for v in six.itervalues(jsonind)) + nlumis = sum(len(v) for v in jsonind.values()) jsonmap = {} - for r,lumis in six.iteritems(jsonind): + for r,lumis in jsonind.items(): if len(lumis) == 0: continue # shouldn't happen lumis.sort() ranges = [ [ lumis[0], lumis[0] ] ] diff --git a/PhysicsTools/HeppyCore/python/framework/config.py b/PhysicsTools/HeppyCore/python/framework/config.py index 0dc1035bafd12..e03c193508423 100644 --- a/PhysicsTools/HeppyCore/python/framework/config.py +++ b/PhysicsTools/HeppyCore/python/framework/config.py @@ -6,7 +6,6 @@ from .weight import Weight import copy import glob -import six def printComps(comps, details=False): ''' @@ -44,7 +43,7 @@ def __str__(self): header = '{type}: {name}'.format( type=self.__class__.__name__, name=self.name) varlines = ['\t{var:<15}: {value}'.format(var=var, value=value) \ - for var,value in sorted(vars(six.iteritems(self))) \ + for var,value in sorted(vars(self).items()) \ if var is not 'name'] all = [ header ] all.extend(varlines) @@ -73,7 +72,7 @@ def clone(self, **kwargs): module2 will share the same instance of value1, and not have two copies. ''' other = copy.copy(self) - for k,v in six.iteritems(kwargs): + for k,v in kwargs.items(): setattr(other, k, v) return other diff --git a/PhysicsTools/HeppyCore/python/framework/event.py b/PhysicsTools/HeppyCore/python/framework/event.py index 1e380537d48eb..d98faac95ffd7 100644 --- a/PhysicsTools/HeppyCore/python/framework/event.py +++ b/PhysicsTools/HeppyCore/python/framework/event.py @@ -1,6 +1,5 @@ import collections from ROOT import TChain -import six class Event(object): '''Event class. 
@@ -28,7 +27,7 @@ def __str__(self): header = '{type}: {iEv}'.format( type=self.__class__.__name__, iEv = self.iEv) varlines = [] - for var,value in sorted(vars(six.iteritems(self))): + for var,value in sorted(vars(self).items()): tmp = value # check for recursivity recursive = False diff --git a/PhysicsTools/HeppyCore/python/framework/looper.py b/PhysicsTools/HeppyCore/python/framework/looper.py index 45c9956a5cbad..d8415f4c3a590 100644 --- a/PhysicsTools/HeppyCore/python/framework/looper.py +++ b/PhysicsTools/HeppyCore/python/framework/looper.py @@ -14,7 +14,6 @@ import timeit import resource import json -import six class Setup(object): '''The Looper creates a Setup object to hold information relevant during @@ -281,7 +280,7 @@ def write(self): jsonfilename = options.options jfile = open (jsonfilename, 'r') opts=json.loads(jfile.readline()) - for k,v in six.iteritems(opts): + for k,v in opts.items(): _heppyGlobalOptions[k]=v jfile.close() diff --git a/PhysicsTools/HeppyCore/python/framework/weight.py b/PhysicsTools/HeppyCore/python/framework/weight.py index 753eebda1ff7a..0bf53ba308317 100644 --- a/PhysicsTools/HeppyCore/python/framework/weight.py +++ b/PhysicsTools/HeppyCore/python/framework/weight.py @@ -3,8 +3,7 @@ # https://github.com/cbernet/heppy/blob/master/LICENSE def printWeights( weights ): - import six - for key, value in six.iteritems(weights): + for key, value in weights.items(): print(key) print(value) diff --git a/PhysicsTools/HeppyCore/python/statistics/tree.py b/PhysicsTools/HeppyCore/python/statistics/tree.py index 87059a6517b8c..4d0d5be2e0819 100644 --- a/PhysicsTools/HeppyCore/python/statistics/tree.py +++ b/PhysicsTools/HeppyCore/python/statistics/tree.py @@ -1,7 +1,6 @@ import numpy from ROOT import TTree import ROOT -import six class Tree(object): @@ -102,12 +101,12 @@ def vector(self, varName, lenvar, maxlen=None, type=float, default=-99, title=No self.vecdefaults[varName] = default def reset(self): - for name,value in six.iteritems(self.vars): + for name,value in self.vars.items(): if name in self.fillers: self.fillers[name](value, self.defaults[name]) else: value[0]=self.defaults[name] - for name,value in six.iteritems(self.vecvars): + for name,value in self.vecvars.items(): if isinstance(value, numpy.ndarray): value.fill(self.vecdefaults[name]) else: diff --git a/PhysicsTools/HeppyCore/python/utils/dataset.py b/PhysicsTools/HeppyCore/python/utils/dataset.py index 7286394926bc0..5bd37e424d5d3 100644 --- a/PhysicsTools/HeppyCore/python/utils/dataset.py +++ b/PhysicsTools/HeppyCore/python/utils/dataset.py @@ -12,7 +12,6 @@ from .castorBaseDir import castorBaseDir from . 
import eostools as castortools import fnmatch -import six class IntegrityCheckError(Exception): def __init__(self, value): @@ -296,7 +295,7 @@ def buildListOfBadFiles(self): self.maskExists = True self.report = report dup = report.get('ValidDuplicates',{}) - for name, status in six.iteritems(report['Files']): + for name, status in report['Files'].items(): # print name, status if not status[0]: self.bad_files[name] = 'MarkedBad' diff --git a/PhysicsTools/HeppyCore/python/utils/edmIntegrityCheck.py b/PhysicsTools/HeppyCore/python/utils/edmIntegrityCheck.py index 80a74b379eaa9..cf7595e1a5b99 100644 --- a/PhysicsTools/HeppyCore/python/utils/edmIntegrityCheck.py +++ b/PhysicsTools/HeppyCore/python/utils/edmIntegrityCheck.py @@ -13,7 +13,6 @@ from .timeout import timed_out, TimedOutExc from .castorBaseDir import castorBaseDir from .dataset import CMSDataset -import six class PublishToFileSystem(object): """Write a report to storage""" @@ -110,8 +109,8 @@ def stripDuplicates(self): import re filemask = {} - for dirname, files in six.iteritems(self.test_result): - for name, status in six.iteritems(files): + for dirname, files in self.test_result.items(): + for name, status in files.items(): fname = os.path.join(dirname, name) filemask[fname] = status @@ -170,11 +169,11 @@ def test(self, previous = None, timeout = -1): #support updating to speed things up prev_results = {} if previous is not None: - for name, status in six.iteritems(previous['Files']): + for name, status in previous['Files'].items(): prev_results[name] = status filesToTest = self.sortByBaseDir(self.listRootFiles(self.directory)) - for dir, filelist in six.iteritems(filesToTest): + for dir, filelist in filesToTest.items(): filemask = {} #apply a UNIX wildcard if specified filtered = filelist @@ -219,9 +218,9 @@ def report(self): print('DBS Dataset name: %s' % self.options.name) print('Storage path: %s' % self.topdir) - for dirname, files in six.iteritems(self.test_result): + for dirname, files in self.test_result.items(): print('Directory: %s' % dirname) - for name, status in six.iteritems(files): + for name, status in files.items(): fname = os.path.join(dirname, name) if not fname in self.duplicates: print('\t\t %s: %s' % (name, str(status))) @@ -260,9 +259,9 @@ def structured(self): 'DateCreated':datetime.datetime.now().strftime("%s"), 'Files':{}} - for dirname, files in six.iteritems(self.test_result): + for dirname, files in self.test_result.items(): report['PathList'].append(dirname) - for name, status in six.iteritems(files): + for name, status in files.items(): fname = os.path.join(dirname, name) report['Files'][fname] = status if status[0]: diff --git a/PhysicsTools/HeppyCore/python/utils/production_tasks.py b/PhysicsTools/HeppyCore/python/utils/production_tasks.py index 2a1ff6be8ea0d..2f1a3d7d2b1b3 100644 --- a/PhysicsTools/HeppyCore/python/utils/production_tasks.py +++ b/PhysicsTools/HeppyCore/python/utils/production_tasks.py @@ -15,7 +15,6 @@ from .dataset import Dataset from .datasetToSource import createDataset from .castorBaseDir import castorBaseDir -import six def mkdir_p(path): try: @@ -667,7 +666,7 @@ def checkStatus(stat): actions = {'FilesToCompress':{'Files':[]}} result = {} - for j, id in six.iteritems(jobs): + for j, id in jobs.items(): if id is None: result[j] = 'UNKNOWN' else: @@ -697,7 +696,7 @@ def checkStatus(stat): def countJobs(stat): """Count jobs that are monitorable - i.e. 
not in a final state""" result = [] - for j, id in six.iteritems(jobs): + for j, id in jobs.items(): if id is not None and id in stat: st = stat[id] if st in ['PEND','PSUSP','RUN','USUSP','SSUSP','WAIT']: @@ -744,7 +743,7 @@ def run(self, input): job_status = input['MonitorJobs']['LSFJobStatus'] result = {} - for j, status in six.iteritems(job_status): + for j, status in job_status.items(): valid = True if os.path.exists(status): @@ -809,7 +808,7 @@ def run(self, input): #collect a list of jobs by status states = {} - for j, status in six.iteritems(report['LSFJobStatusCheck']): + for j, status in report['LSFJobStatusCheck'].items(): if status not in states: states[status] = [] states[status].append(j) @@ -831,7 +830,7 @@ def run(self, input): if self.options.group is not None: user_group = '-G %s' % self.options.group - for status, jobs in six.iteritems(states): + for status, jobs in states.items(): output.write('# %d jobs found in state %s\n' % (len(jobs),status) ) if status == 'VALID': continue diff --git a/PhysicsTools/HeppyCore/scripts/heppy_hadd.py b/PhysicsTools/HeppyCore/scripts/heppy_hadd.py index 8eab2256e0bca..3b9b752023942 100755 --- a/PhysicsTools/HeppyCore/scripts/heppy_hadd.py +++ b/PhysicsTools/HeppyCore/scripts/heppy_hadd.py @@ -7,7 +7,6 @@ import pprint import pickle import shutil -import six MAX_ARG_STRLEN = 131072 @@ -114,7 +113,7 @@ def haddChunks(idir, removeDestDir, cleanUp=False, odir_cmd='./'): if len(chunks)==0: print('warning: no chunk found.') return - for comp, cchunks in six.iteritems(chunks): + for comp, cchunks in chunks.items(): odir = odir_cmd+'/'+'/'.join( [idir, comp] ) print(odir, cchunks) if removeDestDir: @@ -127,7 +126,7 @@ def haddChunks(idir, removeDestDir, cleanUp=False, odir_cmd='./'): shutil.rmtree(chunkDir) os.mkdir(chunkDir) print(chunks) - for comp, chunks in six.iteritems(chunks): + for comp, chunks in chunks.items(): for chunk in chunks: shutil.move(chunk, chunkDir) diff --git a/PhysicsTools/NanoAOD/test/inspectNanoFile.py b/PhysicsTools/NanoAOD/test/inspectNanoFile.py index 6232478ee4399..30cb1e10eb821 100755 --- a/PhysicsTools/NanoAOD/test/inspectNanoFile.py +++ b/PhysicsTools/NanoAOD/test/inspectNanoFile.py @@ -3,7 +3,6 @@ from builtins import range import sys, os.path, json from collections import defaultdict -import six import ROOT ROOT.PyConfig.IgnoreCommandLineOptions = True ROOT.gROOT.SetBatch(True) @@ -12,7 +11,7 @@ class FileData: def __init__(self,data): self._json = data - for k,v in six.iteritems(data): + for k,v in data.items(): setattr(self,k,v) self.Events = self.trees["Events"] self.nevents = self.Events["entries"] @@ -105,7 +104,7 @@ def inspectRootFile(infile): else: b.entries = entries c1 = ROOT.TCanvas("c1","c1") - for counter,countees in six.iteritems(counters): + for counter,countees in counters.items(): n = tree.Draw(counter+">>htemp") if n != 0: htemp = ROOT.gROOT.FindObject("htemp") @@ -129,7 +128,7 @@ def inspectRootFile(infile): if head not in branchgroups: branchgroups[head] = BranchGroup(head) branchgroups[head].append(b) - for bg in six.itervalues(branchgroups): + for bg in branchgroups.values(): if bg.name in toplevelDoc: bg.doc = toplevelDoc[bg.name] kind = bg.getKind() @@ -140,14 +139,14 @@ def inspectRootFile(infile): for counter in set(s.counter for s in bg.subs if not s.single): bg.append(branchmap[counter]) allsize_c = sum(b.tot for b in allbranches) - allsize = sum(b.tot for b in six.itervalues(branchgroups)) + allsize = sum(b.tot for b in branchgroups.values()) if abs(allsize_c - allsize) > 
1e-6*(allsize_c+allsize): sys.stderr.write("Total size mismatch for tree %s: %10.4f kb vs %10.4f kb\n" % (treeName, allsize, allsize_c)) trees[treeName] = dict( entries = entries, allsize = allsize, branches = dict(b.toJSON() for b in allbranches), - branchgroups = dict(bg.toJSON() for bg in six.itervalues(branchgroups)), + branchgroups = dict(bg.toJSON() for bg in branchgroups.values()), ) c1.Close() break # only Event tree for now @@ -157,7 +156,7 @@ def inspectRootFile(infile): def makeSurvey(treeName, treeData): allsize = treeData['allsize'] entries = treeData['entries'] - survey = list(six.itervalues(treeData['branchgroups'])) + survey = list(treeData['branchgroups'].values()) survey.sort(key = lambda bg : - bg['tot']) scriptdata = [] runningtotal = 0 diff --git a/PhysicsTools/NanoAOD/test/prepareDQM.py b/PhysicsTools/NanoAOD/test/prepareDQM.py index 20b42d5336a2a..dc0f7aec57459 100644 --- a/PhysicsTools/NanoAOD/test/prepareDQM.py +++ b/PhysicsTools/NanoAOD/test/prepareDQM.py @@ -4,7 +4,6 @@ import sys, os from collections import defaultdict from math import ceil, pi, log -import six import ROOT ROOT.PyConfig.IgnoreCommandLineOptions = True ROOT.gROOT.SetBatch(True) @@ -120,7 +119,7 @@ def autoPlot1D(name, col, branch): existing = set(x[0] for x in allplots) found = set() title = dict( (n,x.title.value()) for (n,x) in allplots if x.kind.value() != "none" and x.title.value() ) - for (t,branch) in sorted(six.iteritems(branchgroups[head])): + for (t,branch) in sorted(branchgroups[head].items()): t_noat = t.replace("@","_") found.add(t_noat) if t_noat not in title: title[t_noat] = branch.title diff --git a/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_HcalFixLegacy2016.py b/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_HcalFixLegacy2016.py index d98b7e4e85d28..31508ac5680b8 100644 --- a/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_HcalFixLegacy2016.py +++ b/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_HcalFixLegacy2016.py @@ -3,7 +3,6 @@ from PhysicsTools.PatAlgos.tools.helpers import MassSearchReplaceAnyInputTagVisitor, addKeepStatement from PhysicsTools.PatAlgos.tools.helpers import getPatAlgosToolsTask -import six def loadJetMETBTag(process): @@ -58,7 +57,7 @@ def cleanPfCandidates(process, verbose=False): replacePFCandidates = MassSearchReplaceAnyInputTagVisitor("particleFlow", "pfCandidateRecalibrator", verbose=verbose) replacePFTmpPtrs = MassSearchReplaceAnyInputTagVisitor("particleFlowTmpPtrs", "particleFlowPtrs", verbose=verbose) for everywhere in [ process.producers, process.filters, process.analyzers, process.psets, process.vpsets ]: - for name,obj in six.iteritems(everywhere): + for name,obj in everywhere.items(): if obj != process.pfCandidateRecalibrator: replacePFCandidates.doIt(obj, name) replacePFTmpPtrs.doIt(obj, name) diff --git a/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_MuEGFixMoriond2017.py b/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_MuEGFixMoriond2017.py index 4ffa87b617213..a78da00814899 100644 --- a/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_MuEGFixMoriond2017.py +++ b/PhysicsTools/PatAlgos/python/slimming/customizeMiniAOD_MuEGFixMoriond2017.py @@ -5,7 +5,6 @@ #from RecoEgamma.EgammaTools.egammaGainSwitchFixToolsForPAT_cff import customizeGSFixForPAT from PhysicsTools.PatAlgos.tools.helpers import getPatAlgosToolsTask, addToProcessAndTask import sys -import six def addBadMuonFilters(process): @@ -35,7 +34,7 @@ def cleanPFCandidates(process, badMuons, verbose=False): needOriginalMuons = [ 
process.muonsCleaned ] + [ getattr(process,l.moduleLabel) for l in badMuons ] replacePFCandidates = MassSearchReplaceAnyInputTagVisitor("particleFlow", "pfCandidatesBadMuonsCleaned", verbose=verbose) for everywhere in [ process.producers, process.filters, process.analyzers, process.psets, process.vpsets ]: - for name,obj in six.iteritems(everywhere): + for name,obj in everywhere.items(): if obj not in needOriginalMuons: replaceMuons.doIt(obj, name) if obj != process.pfCandidatesBadMuonsCleaned: diff --git a/PhysicsTools/PatAlgos/python/slimming/extraJets_MuEGFixMoriond2017.py b/PhysicsTools/PatAlgos/python/slimming/extraJets_MuEGFixMoriond2017.py index ea2e1183f50a6..05d239cbd941c 100644 --- a/PhysicsTools/PatAlgos/python/slimming/extraJets_MuEGFixMoriond2017.py +++ b/PhysicsTools/PatAlgos/python/slimming/extraJets_MuEGFixMoriond2017.py @@ -2,7 +2,6 @@ from PhysicsTools.PatAlgos.tools.helpers import listDependencyChain, massSearchReplaceAnyInputTag, cloneProcessingSnippet, addKeepStatement,listModules from PhysicsTools.PatAlgos.tools.helpers import getPatAlgosToolsTask, addToProcessAndTask -import six def makeRecoJetCollection(process, pfCandCollection, @@ -93,7 +92,7 @@ def backupJetsSecondStep(process, sequences, badMuons, verbose=False): task = getPatAlgosToolsTask(process) # put back the old input tags and copy in task - for sequence in six.itervalues(sequences): + for sequence in sequences.values(): massSearchReplaceAnyInputTag(sequence, "pfCandidatesBadMuonsCleaned", "particleFlow") massSearchReplaceAnyInputTag(sequence, "muonsCleaned", "muons") for mod in listModules(sequence): diff --git a/PhysicsTools/PatAlgos/python/tools/helpers.py b/PhysicsTools/PatAlgos/python/tools/helpers.py index ccd2f7c88b8e4..a1d4a7851a63d 100644 --- a/PhysicsTools/PatAlgos/python/tools/helpers.py +++ b/PhysicsTools/PatAlgos/python/tools/helpers.py @@ -1,7 +1,6 @@ from __future__ import print_function import FWCore.ParameterSet.Config as cms import sys -import six ## Helpers to perform some technically boring tasks like looking for all modules with a given parameter ## and replacing that to a given value @@ -276,7 +275,7 @@ def listDependencyChain(process, module, sources, verbose=False): """ def allDirectInputModules(moduleOrPSet,moduleName,attrName): ret = set() - for name,value in six.iteritems(moduleOrPSet.parameters_()): + for name,value in moduleOrPSet.parameters_().items(): type = value.pythonTypeName() if type == 'cms.PSet': ret.update(allDirectInputModules(value,moduleName,moduleName+"."+name)) @@ -354,7 +353,7 @@ def flattenRevDeps(flatgraph, revdepgraph, tip): def addKeepStatement(process, oldKeep, newKeeps, verbose=False): """Add new keep statements to any PoolOutputModule of the process that has the old keep statements""" - for name,out in six.iteritems(process.outputModules): + for name,out in process.outputModules.items(): if out.type_() == 'PoolOutputModule' and hasattr(out, "outputCommands"): if oldKeep in out.outputCommands: out.outputCommands += newKeeps diff --git a/PhysicsTools/PythonAnalysis/python/cmscompleter.py b/PhysicsTools/PythonAnalysis/python/cmscompleter.py index 9551f5b32a326..2b6a111d850a6 100644 --- a/PhysicsTools/PythonAnalysis/python/cmscompleter.py +++ b/PhysicsTools/PythonAnalysis/python/cmscompleter.py @@ -11,7 +11,7 @@ import readline import rlcompleter -from six import builtins +import builtins import __main__ __all__ = ["CMSCompleter"] diff --git a/PhysicsTools/PythonAnalysis/python/cmstools.py b/PhysicsTools/PythonAnalysis/python/cmstools.py index 
7081c9c9f0744..f37da3a0925b6 100644 --- a/PhysicsTools/PythonAnalysis/python/cmstools.py +++ b/PhysicsTools/PythonAnalysis/python/cmstools.py @@ -8,7 +8,6 @@ from builtins import range import re import ROOT -import six import sys ### define tab completion try: @@ -115,7 +114,7 @@ def index(self): def tree(self): return self._tree def __setBranchIndicies(self): - for branch in six.itervalues(self._usedBranches): + for branch in self._usedBranches.values(): branch.setIndex(self._index) def __getattr__(self, name): return self.branch(name) diff --git a/PhysicsTools/PythonAnalysis/test/BuildFile.xml b/PhysicsTools/PythonAnalysis/test/BuildFile.xml index 81eb26a674943..9eb7136ad4df3 100644 --- a/PhysicsTools/PythonAnalysis/test/BuildFile.xml +++ b/PhysicsTools/PythonAnalysis/test/BuildFile.xml @@ -120,7 +120,6 @@ - diff --git a/PhysicsTools/Utilities/scripts/pileupDistInMC.py b/PhysicsTools/Utilities/scripts/pileupDistInMC.py index 544c08f86c405..a90f6c2b2cb5a 100755 --- a/PhysicsTools/Utilities/scripts/pileupDistInMC.py +++ b/PhysicsTools/Utilities/scripts/pileupDistInMC.py @@ -4,7 +4,6 @@ import optparse import re from pprint import pprint -import six commentRE = re.compile (r'#.*$') @@ -75,7 +74,7 @@ print("normalized:") renormDict = {} - for key, count in six.iteritems(countDict): + for key, count in countDict.items(): renormDict[key] = count / total pprint (renormDict) diff --git a/RecoBTag/ONNXRuntime/test/test_deep_doubleb_cfg_AOD.py b/RecoBTag/ONNXRuntime/test/test_deep_doubleb_cfg_AOD.py index 8c82f56a62006..b21627033eb71 100644 --- a/RecoBTag/ONNXRuntime/test/test_deep_doubleb_cfg_AOD.py +++ b/RecoBTag/ONNXRuntime/test/test_deep_doubleb_cfg_AOD.py @@ -1,7 +1,6 @@ from __future__ import print_function import FWCore.ParameterSet.Config as cms from PhysicsTools.PatAlgos.tools.helpers import getPatAlgosToolsTask -import six process = cms.Process("PAT") @@ -124,9 +123,9 @@ #Trick to make it work in >=9_1_X process.tsk = cms.Task() -for mod in six.itervalues(process.producers_()): +for mod in process.producers_().values(): process.tsk.add(mod) -for mod in six.itervalues(process.filters_()): +for mod in process.filters_().values(): process.tsk.add(mod) process.p = cms.Path( diff --git a/RecoLuminosity/LumiDB/scripts/makePileupJSON.py b/RecoLuminosity/LumiDB/scripts/makePileupJSON.py index 2317e7642ff59..ee69a8122e8d6 100755 --- a/RecoLuminosity/LumiDB/scripts/makePileupJSON.py +++ b/RecoLuminosity/LumiDB/scripts/makePileupJSON.py @@ -4,7 +4,6 @@ import RecoLuminosity.LumiDB.LumiConstants as LumiConstants import re from math import sqrt -import six ############################## ## ######################## ## diff --git a/RecoLuminosity/LumiDB/scripts/pileupCalc.py b/RecoLuminosity/LumiDB/scripts/pileupCalc.py index 4b77369b54c73..f1150daaf61a9 100755 --- a/RecoLuminosity/LumiDB/scripts/pileupCalc.py +++ b/RecoLuminosity/LumiDB/scripts/pileupCalc.py @@ -8,7 +8,6 @@ from RecoLuminosity.LumiDB import selectionParser from math import exp from math import sqrt -import six def parseInputFile(inputfilename): ''' @@ -224,7 +223,7 @@ def fillPileupHistogram (lumiInfo, calcOption, hist, minbXsec, Nbins, run, ls): # now, we have to find the information for the input runs and lumi sections # in the Lumi/Pileup list. 
First, loop over inputs - for (run, lslist) in sorted (six.iteritems(inputRange)): + for (run, lslist) in sorted (inputRange.items()): # now, look for matching run, then match lumi sections # print "searching for run %d" % (run) if run in inputPileupRange.keys(): diff --git a/RecoLuminosity/LumiDB/scripts/pileupReCalc_HLTpaths.py b/RecoLuminosity/LumiDB/scripts/pileupReCalc_HLTpaths.py index c70e97bb94589..58acc3eee81c1 100755 --- a/RecoLuminosity/LumiDB/scripts/pileupReCalc_HLTpaths.py +++ b/RecoLuminosity/LumiDB/scripts/pileupReCalc_HLTpaths.py @@ -8,7 +8,6 @@ from RecoLuminosity.LumiDB import csvLumibyLSParser from math import exp from math import sqrt -import six def parseInputFile(inputfilename): ''' @@ -96,7 +95,7 @@ def parseInputFile(inputfilename): OUTPUTLINE = "" OUTPUTLINE+='{' - for (run, lslist) in sorted (six.iteritems(inputRange)): + for (run, lslist) in sorted (inputRange.items()): # now, look for matching run, then match lumi sections #print "searching for run %d" % (run) if run in inputPileupRange.keys(): diff --git a/RecoLuminosity/LumiDB/scripts/pileupReCalc_Lumis.py b/RecoLuminosity/LumiDB/scripts/pileupReCalc_Lumis.py index 7ed76ea75eb32..a01e9007f0f15 100755 --- a/RecoLuminosity/LumiDB/scripts/pileupReCalc_Lumis.py +++ b/RecoLuminosity/LumiDB/scripts/pileupReCalc_Lumis.py @@ -8,7 +8,6 @@ from RecoLuminosity.LumiDB import csvLumibyLSParser from math import exp from math import sqrt -import six def parseInputFile(inputfilename): ''' @@ -97,7 +96,7 @@ def parseInputFile(inputfilename): # loop over pileup JSON as source, since it should have more lumi sections - for (run, LSPUlist) in sorted (six.iteritems(inputPileupRange)): + for (run, LSPUlist) in sorted (inputPileupRange.items()): # now, look for matching run, then match lumi sections #print "searching for run %d" % (run) if run in inputRange.keys(): diff --git a/RecoTauTag/Configuration/python/tools/adaptToRunAtMiniAOD.py b/RecoTauTag/Configuration/python/tools/adaptToRunAtMiniAOD.py index 5de6298be326e..e19ab6c984573 100644 --- a/RecoTauTag/Configuration/python/tools/adaptToRunAtMiniAOD.py +++ b/RecoTauTag/Configuration/python/tools/adaptToRunAtMiniAOD.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six ###### # Tools to adapt Tau sequences to run tau ReReco+PAT at MiniAOD samples @@ -116,7 +115,7 @@ def adaptTauToMiniAODReReco(process, reclusterJets=True): process.combinatoricRecoTaus.jetSrc = jetCollection # Adapt builders for builder in process.combinatoricRecoTaus.builders: - for name,value in six.iteritems(builder.parameters_()): + for name,value in builder.parameters_().items(): if name == 'qualityCuts': builder.qualityCuts.primaryVertexSrc = 'offlineSlimmedPrimaryVertices' elif name == 'pfCandSrc': @@ -130,7 +129,7 @@ def adaptTauToMiniAODReReco(process, reclusterJets=True): elif mod.name.value() == 'TTIworkaround': modifiersToRemove_.append(mod) continue - for name,value in six.iteritems(mod.parameters_()): + for name,value in mod.parameters_().items(): if name == 'qualityCuts': mod.qualityCuts.primaryVertexSrc = 'offlineSlimmedPrimaryVertices' for mod in modifiersToRemove_: @@ -167,7 +166,7 @@ def adaptTauToMiniAODReReco(process, reclusterJets=True): process.tauMatch.matched = cms.InputTag("prunedGenParticles") # Remove unsupported tauIDs - for name, src in six.iteritems(process.patTaus.tauIDSources.parameters_()): + for name, src in process.patTaus.tauIDSources.parameters_().items(): if name.find('againstElectron') > -1 or name.find('againstMuon') > -1: if 
name.find('againstElectronDeadECAL') > -1: continue delattr(process.patTaus.tauIDSources,name) diff --git a/RecoTauTag/RecoTau/python/TauDiscriminatorTools.py b/RecoTauTag/RecoTau/python/TauDiscriminatorTools.py index b48d0099b6a27..77ec1f74c9501 100644 --- a/RecoTauTag/RecoTau/python/TauDiscriminatorTools.py +++ b/RecoTauTag/RecoTau/python/TauDiscriminatorTools.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six # require the EXISTANCE of a track - not necessarily above any pt cut (above the basic 0.5 GeV filter) leadTrackFinding = cms.PSet( @@ -27,7 +26,7 @@ def subParameterSets(pSet): ''' Generator to return all sub-PSets in a PSet ''' - for name, value in six.iteritems(pSet.parameters_()): + for name, value in pSet.parameters_().items(): if isinstance(value, cms.PSet): yield getattr(pSet, name) diff --git a/RecoTauTag/RecoTau/python/tools/runTauIdMVA.py b/RecoTauTag/RecoTau/python/tools/runTauIdMVA.py index c06a99fe7c0cf..e26e253b38008 100644 --- a/RecoTauTag/RecoTau/python/tools/runTauIdMVA.py +++ b/RecoTauTag/RecoTau/python/tools/runTauIdMVA.py @@ -4,7 +4,6 @@ from RecoTauTag.RecoTau.PATTauDiscriminationByMVAIsolationRun2_cff import patDiscriminationByIsolationMVArun2v1raw, patDiscriminationByIsolationMVArun2v1 import os import re -import six class TauIDEmbedder(object): """class to rerun the tau seq and acces trainings from the database""" @@ -1013,12 +1012,12 @@ def tauIDMVAinputs(module, wp): def processDeepProducer(self, producer_name, tauIDSources, workingPoints_): - for target,points in six.iteritems(workingPoints_): + for target,points in workingPoints_.items(): setattr(tauIDSources, 'by{}VS{}raw'.format(producer_name[0].upper()+producer_name[1:], target), cms.PSet(inputTag = cms.InputTag(producer_name+self.postfix, 'VS{}'.format(target)), workingPointIndex = cms.int32(-1))) cut_expressions = [] - for index, (point,cut) in enumerate(six.iteritems(points)): + for index, (point,cut) in enumerate(points.items()): cut_expressions.append(str(cut)) setattr(tauIDSources, 'by{}{}VS{}'.format(point, producer_name[0].upper()+producer_name[1:], target), diff --git a/RecoTracker/Configuration/python/customiseEarlyDeleteForSeeding.py b/RecoTracker/Configuration/python/customiseEarlyDeleteForSeeding.py index e022730388fd1..7c6d6058d2da3 100644 --- a/RecoTracker/Configuration/python/customiseEarlyDeleteForSeeding.py +++ b/RecoTracker/Configuration/python/customiseEarlyDeleteForSeeding.py @@ -1,7 +1,6 @@ import FWCore.ParameterSet.Config as cms import collections -import six def customiseEarlyDeleteForSeeding(process, products): # Find the producers @@ -10,7 +9,7 @@ def customiseEarlyDeleteForSeeding(process, products): def _branchName(productType, moduleLabel, instanceLabel=""): return "%s_%s_%s_%s" % (productType, moduleLabel, instanceLabel, process.name_()) - for name, module in six.iteritems(process.producers_()): + for name, module in process.producers_().items(): cppType = module._TypedParameterizable__type if cppType == "HitPairEDProducer": if module.produceSeedingHitSets: diff --git a/RecoVertex/BeamSpotProducer/scripts/CommonMethods.py b/RecoVertex/BeamSpotProducer/scripts/CommonMethods.py index 1e1b705ff533e..48245f411717e 100644 --- a/RecoVertex/BeamSpotProducer/scripts/CommonMethods.py +++ b/RecoVertex/BeamSpotProducer/scripts/CommonMethods.py @@ -3,7 +3,6 @@ import math, re, optparse, commands, os, sys, time, datetime from BeamSpotObj import BeamSpot from IOVObj import IOV -import six lockFile = ".lock" @@ -131,7 +130,7 @@ def parse(docstring, arglist=None): 
########################################################################################### def nonzero(self): # will become the nonzero method of optparse.Values "True if options were given" - for v in six.itervalues(self.__dict__): + for v in self.__dict__.values(): if v is not None: return True return False diff --git a/RecoVertex/BeamSpotProducer/scripts/beamvalidation.py b/RecoVertex/BeamSpotProducer/scripts/beamvalidation.py index 12cc90138de18..268d8b5fed14b 100644 --- a/RecoVertex/BeamSpotProducer/scripts/beamvalidation.py +++ b/RecoVertex/BeamSpotProducer/scripts/beamvalidation.py @@ -31,7 +31,6 @@ from builtins import range import os, string, re, sys, math import commands, time -import six #_______________OPTIONS________________ import optparse @@ -40,7 +39,7 @@ def nonzero(self): # will become the nonzero method of optparse.Values "True if options were given" - for v in six.itervalues(self.__dict__): + for v in self.__dict__.values(): if v is not None: return True return False diff --git a/RecoVertex/BeamSpotProducer/scripts/getBeamSpotDB.py b/RecoVertex/BeamSpotProducer/scripts/getBeamSpotDB.py index 0e696fb5eabc1..99ae70effe6b6 100755 --- a/RecoVertex/BeamSpotProducer/scripts/getBeamSpotDB.py +++ b/RecoVertex/BeamSpotProducer/scripts/getBeamSpotDB.py @@ -33,7 +33,6 @@ import sys,os, re import commands -import six #_______________OPTIONS________________ import optparse @@ -42,7 +41,7 @@ def nonzero(self): # will become the nonzero method of optparse.Values "True if options were given" - for v in six.itervalues(self.__dict__): + for v in self.__dict__.values(): if v is not None: return True return False diff --git a/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigiReco_cfg.py b/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigiReco_cfg.py index ce60c17db9efa..be8d131e85d05 100644 --- a/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigiReco_cfg.py +++ b/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigiReco_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six process = cms.Process('SIMDIGIRECO') @@ -122,7 +121,7 @@ for path in process.paths: getattr(process,path)._seq = process.generator * getattr(process,path)._seq -for label, prod in six.iteritems(process.producers_()): +for label, prod in process.producers_().items(): if prod.type_() == "OscarMTProducer": # ugly hack prod.__dict__['_TypedParameterizable__type'] = "OscarProducer" diff --git a/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigi_cfg.py b/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigi_cfg.py index 0d8e070f47731..c1643780982f9 100644 --- a/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigi_cfg.py +++ b/SimG4CMS/HGCalTestBeam/test/HGCalTBGenSimDigi_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six process = cms.Process('SIMDIGI') @@ -120,7 +119,7 @@ for path in process.paths: getattr(process,path)._seq = process.generator * getattr(process,path)._seq -for label, prod in six.iteritems(process.producers_()): +for label, prod in process.producers_().items(): if prod.type_() == "OscarMTProducer": # ugly hack prod.__dict__['_TypedParameterizable__type'] = "OscarProducer" diff --git a/TauAnalysis/MCEmbeddingTools/python/customisers.py b/TauAnalysis/MCEmbeddingTools/python/customisers.py index 5e806a2c0b414..a608856d96dc1 100644 --- a/TauAnalysis/MCEmbeddingTools/python/customisers.py +++ b/TauAnalysis/MCEmbeddingTools/python/customisers.py @@ -5,7 +5,6 @@ from __future__ import print_function import FWCore.ParameterSet.Config as cms -import six ################################ Customizer for skimming 
########################### ### There are four different parts. @@ -67,7 +66,7 @@ def __init__(self, module_name, manipulator_name, steps = ["SELECT","CLEAN","SIM def modify_outputModules(process, keep_drop_list = [], module_veto_list = [] ): - outputModulesList = [key for key,value in six.iteritems(process.outputModules)] + outputModulesList = [key for key,value in process.outputModules.items()] for outputModule in outputModulesList: if outputModule in module_veto_list: continue @@ -112,7 +111,7 @@ def customiseSelecting(process,reselect=False): process.selecting = cms.Path(process.makePatMuonsZmumuSelection) process.schedule.insert(-1, process.selecting) - outputModulesList = [key for key,value in six.iteritems(process.outputModules)] + outputModulesList = [key for key,value in process.outputModules.items()] for outputModule in outputModulesList: outputModule = getattr(process, outputModule) outputModule.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring("selecting")) @@ -466,7 +465,7 @@ def customiseFilterTTbartoMuMu(process): def customiseMCFilter(process): process.schedule.insert(-1,process.MCFilter) - outputModulesList = [key for key,value in six.iteritems(process.outputModules)] + outputModulesList = [key for key,value in process.outputModules.items()] for outputModule in outputModulesList: outputModule = getattr(process, outputModule) outputModule.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring("MCFilter")) diff --git a/Utilities/RelMon/python/web/app_utils.py b/Utilities/RelMon/python/web/app_utils.py index a7042dbd01f46..5859c7894e614 100755 --- a/Utilities/RelMon/python/web/app_utils.py +++ b/Utilities/RelMon/python/web/app_utils.py @@ -15,7 +15,6 @@ from urllib import quote from functools import reduce -import six renaming = { 'MessageLogger': 'Miscellanea', 'FourVector': 'Generic', @@ -247,7 +246,7 @@ def get_release_summary_stats(c, release_title, st_test, threshold=1e-5): # Fetch stats summary_stats = dict() detailed_stats = dict() - for name, ranges in six.iteritems(cum_lvl3_dir_ranges): + for name, ranges in cum_lvl3_dir_ranges.items(): successes, nulls, fails = get_stats(c, threshold, ranges) if name in detailed_stats: detailed_stats[name][0] += successes @@ -265,13 +264,13 @@ def get_release_summary_stats(c, release_title, st_test, threshold=1e-5): # Calculate ratio summary_ratios = [] - for name, stats in six.iteritems(summary_stats): + for name, stats in summary_stats.items(): total = sum(stats) if total: ratio = float(stats[0]) / sum(stats) summary_ratios.append((name, ratio)) detailed_ratios = [] - for name, stats in six.iteritems(detailed_stats): + for name, stats in detailed_stats.items(): total = sum(stats) if total: ratio = float(stats[0]) / sum(stats) diff --git a/Utilities/ReleaseScripts/scripts/duplicateReflexLibrarySearch.py b/Utilities/ReleaseScripts/scripts/duplicateReflexLibrarySearch.py index 3d62361eb02fa..3a4f0f752dc52 100755 --- a/Utilities/ReleaseScripts/scripts/duplicateReflexLibrarySearch.py +++ b/Utilities/ReleaseScripts/scripts/duplicateReflexLibrarySearch.py @@ -8,7 +8,6 @@ import pprint import subprocess from XML2Python import xml2obj -import six try: from subprocess import getoutput except: @@ -188,7 +187,7 @@ def searchClassDefXml (): className = stdRE.sub ('', className) # print " ", className # Now get rid of any typedefs - for typedef, tdList in six.iteritems(typedefsDict): + for typedef, tdList in typedefsDict.items(): for alias in tdList: className = re.sub (alias, typedef, className) classDict.setdefault 
(className, set()).add (filename) @@ -234,7 +233,7 @@ def searchClassDefXml (): print('\n%s\n%s\n' % (filename, dupProblems)) # for filename if options.dups: - for name, fileSet in sorted( six.iteritems(classDict) ): + for name, fileSet in sorted( classDict.items() ): if len (fileSet) < 2: continue print(name) diff --git a/Validation/Configuration/python/ECALHCAL.py b/Validation/Configuration/python/ECALHCAL.py index 1c4e18046144b..c23cd4db1433e 100644 --- a/Validation/Configuration/python/ECALHCAL.py +++ b/Validation/Configuration/python/ECALHCAL.py @@ -1,4 +1,3 @@ -import six import FWCore.ParameterSet.Config as cms def customise(process): @@ -30,8 +29,7 @@ def customise(process): # modify the content #process.output.outputCommands.append("keep *_simHcalUnsuppressedDigis_*_*") - six.next(six.iteritems(process.outputModules_()))[1].outputCommands.append("keep *_simHcalUnsuppressedDigis_*_*") - + next(iter(process.outputModules_().items()))[1].outputCommands.append("keep *_simHcalUnsuppressedDigis_*_*") # user schedule: use only calorimeters digitization and local reconstruction del process.schedule[:] @@ -84,6 +82,5 @@ def customise(process): process.schedule.append(process.endjob_step) #process.schedule.append(process.out_step) - process.schedule.append(getattr(process,six.next(six.iteritems(process.outputModules_()))[0]+"_step")) return(process) diff --git a/Validation/Geometry/test/MaterialBudget.py b/Validation/Geometry/test/MaterialBudget.py index 5fce383d89e71..3528a2da4b7a0 100644 --- a/Validation/Geometry/test/MaterialBudget.py +++ b/Validation/Geometry/test/MaterialBudget.py @@ -3,7 +3,6 @@ # Pure trick to start ROOT in batch mode, pass this only option to it # and the rest of the command line options to this code. from __future__ import print_function -import six import sys oldargv = sys.argv[:] sys.argv = [ '-b-' ] @@ -321,7 +320,7 @@ def setRanges(h): counter = 0 legends = OrderedDict() #KeepAlive - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): mainPad[counter].cd() oldHistos[label] = get1DHisto_(detector, @@ -368,7 +367,7 @@ def setRanges(h): mainPadDiff, subPadDiff = setUpCanvas(canDiff) counter = 0 - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): mainPadDiff[counter].cd() oldHistos[label].SetTitle(setUpTitle(detector,leg,plot)) oldHistos[label].Draw("HIST") @@ -551,7 +550,7 @@ def createPlots_(plot, geometry): hist_X0_IB = None hist_X0_elements = OrderedDict() - for subDetector,color in six.iteritems(DETECTORS): + for subDetector,color in DETECTORS.items(): h = get1DHisto_(subDetector,plots[plot].plotNumber,geometry) if not h: print('Warning: Skipping %s'%subDetector) @@ -567,7 +566,7 @@ def createPlots_(plot, geometry): ) # category profiles - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): if label is 'SUM': continue hist_label = get1DHisto_(subDetector, num + plots[plot].plotNumber, geometry) hist_X0_elements[label] = assignOrAddIfExists_( @@ -585,7 +584,7 @@ def createPlots_(plot, geometry): cumulative_matbdg.SetDirectory(0) # colors - for det, color in six.iteritems(DETECTORS): + for det, color in DETECTORS.items(): setColorIfExists_(hist_X0_detectors, det, color) # First Plot: BeamPipe + Pixel + TIB/TID + TOB + TEC + Outside @@ -593,7 +592,7 @@ def createPlots_(plot, geometry): stackTitle_SubDetectors = "Tracker Material 
Budget;%s;%s" % ( plots[plot].abscissa,plots[plot].ordinate) stack_X0_SubDetectors = THStack("stack_X0",stackTitle_SubDetectors) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): stack_X0_SubDetectors.Add(histo) cumulative_matbdg.Add(histo, 1) @@ -616,7 +615,7 @@ def createPlots_(plot, geometry): theLegend_SubDetectors.SetFillStyle(0) theLegend_SubDetectors.SetBorderSize(0) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): theLegend_SubDetectors.AddEntry(histo, det, "f") theLegend_SubDetectors.Draw() @@ -643,7 +642,7 @@ def createPlots_(plot, geometry): plots[plot].ordinate) stack_X0_Materials = THStack("stack_X0",stackTitle_Materials) stack_X0_Materials.Add(hist_X0_detectors["BeamPipe"]) - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): if label is 'SUM': continue stack_X0_Materials.Add(hist_X0_elements[label]) @@ -666,7 +665,7 @@ def createPlots_(plot, geometry): theLegend_Materials.SetBorderSize(0) theLegend_Materials.AddEntry(hist_X0_detectors["BeamPipe"], "Beam Pipe", "f") - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): if label is 'SUM': continue theLegend_Materials.AddEntry(hist_X0_elements[label], leg, "f") @@ -715,7 +714,7 @@ def createPlotsReco_(reco_file, label, debug=False): for s in sPREF: hs = THStack("hs",""); histos = [] - for det, color in six.iteritems(sDETS): + for det, color in sDETS.items(): layer_number = 0 while True: layer_number += 1 @@ -828,7 +827,7 @@ def setRanges(h): maxY = h.GetBinContent(h.GetMaximumBin()) * legendSpace h.GetYaxis().SetRangeUser(minY, maxY) - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): # We don't want the sum to be added as part of the stack if label is 'SUM': continue diff --git a/Validation/Geometry/test/MaterialBudgetHGCal.py b/Validation/Geometry/test/MaterialBudgetHGCal.py index 185a9f0dc03b2..9e2d2212a6501 100644 --- a/Validation/Geometry/test/MaterialBudgetHGCal.py +++ b/Validation/Geometry/test/MaterialBudgetHGCal.py @@ -7,7 +7,6 @@ import numpy as np import pandas as pd import matplotlib.pyplot as plt -import six from array import array oldargv = sys.argv[:] @@ -103,7 +102,7 @@ def createPlots_(plot, compounddetectorname): hist_X0_elements = OrderedDict() prof_X0_elements = OrderedDict() - for subDetector,color in six.iteritems(DETECTORS): + for subDetector,color in DETECTORS.items(): subDetectorFilename = "matbdg_%s.root" % subDetector if not checkFile_(subDetectorFilename): print("Error opening file: %s" % subDetectorFilename) @@ -117,7 +116,7 @@ def createPlots_(plot, compounddetectorname): hist_X0_detectors[subDetector] = prof_X0_XXX.ProjectionX() # category profiles - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): prof_X0_elements[label] = subDetectorFile.Get("%d" % (num + plots[plot].plotNumber)) hist_X0_elements[label] = assignOrAddIfExists_(hist_X0_elements.setdefault(label, None), prof_X0_elements[label]) @@ -130,10 +129,10 @@ def createPlots_(plot, compounddetectorname): cumulative_matbdg.SetDirectory(0) # colors - for det, color in six.iteritems(DETECTORS): + for det, color in DETECTORS.items(): setColorIfExists_(hist_X0_detectors, det, color) - for label, [num, color, leg] in 
six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): hist_X0_elements[label].SetFillColor(color) # First Plot: BeamPipe + Tracker + ECAL + HCal + HGCal + MB + MGNT @@ -141,7 +140,7 @@ def createPlots_(plot, compounddetectorname): stackTitle_SubDetectors = "Material Budget;%s;%s" % ( plots[plot].abscissa,plots[plot].ordinate) stack_X0_SubDetectors = THStack("stack_X0",stackTitle_SubDetectors) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): stack_X0_SubDetectors.Add(histo) cumulative_matbdg.Add(histo, 1) @@ -164,7 +163,7 @@ def createPlots_(plot, compounddetectorname): theLegend_SubDetectors.SetFillStyle(0) theLegend_SubDetectors.SetBorderSize(0) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): theLegend_SubDetectors.AddEntry(histo, det, "f") theLegend_SubDetectors.Draw() @@ -251,7 +250,7 @@ def createPlots2D_(plot, compounddetectorname): hist_X0_elements = OrderedDict() prof_X0_elements = OrderedDict() - for subDetector,color in six.iteritems(DETECTORS): + for subDetector,color in DETECTORS.items(): subDetectorFilename = "matbdg_%s.root" % subDetector if not checkFile_(subDetectorFilename): print("Error opening file: %s" % subDetectorFilename) @@ -323,11 +322,11 @@ def createPlots2D_(plot, compounddetectorname): # colors - for det, color in six.iteritems(DETECTORS): + for det, color in DETECTORS.items(): hist_X0_detectors[det].SetMarkerColor(color) hist_X0_detectors[det].SetFillColor(color) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): print(det) histo.Draw("same") @@ -338,7 +337,7 @@ def createPlots2D_(plot, compounddetectorname): theLegend_SubDetectors.SetFillStyle(0) theLegend_SubDetectors.SetBorderSize(0) - for det, histo in six.iteritems(hist_X0_detectors): + for det, histo in hist_X0_detectors.items(): theLegend_SubDetectors.AddEntry(histo, det, "f") #theLegend_SubDetectors.AddEntry(hgbound1, "HGCal Eta Boundaries [1.3, 3.0]", "l") @@ -405,7 +404,7 @@ def createPlotsReco_(reco_file, label, debug=False): for s in sPREF: hs = THStack("hs",""); histos = [] - for det, color in six.iteritems(sDETS): + for det, color in sDETS.items(): layer_number = 0 while True: layer_number += 1 @@ -508,7 +507,7 @@ def createCompoundPlots(detector, plot): # get TProfiles prof_X0_elements = OrderedDict() hist_X0_elements = OrderedDict() - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): #print label, num, color, leg prof_X0_elements[label] = theDetectorFile.Get("%d" % (num + plots[plot].plotNumber)) hist_X0_elements[label] = prof_X0_elements[label].ProjectionX() @@ -529,7 +528,7 @@ def createCompoundPlots(detector, plot): print("*** Open file... 
%s" % subDetectorFilename) # subdetector profiles - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): prof_X0_elements[label] = subDetectorFile.Get("%d" % (num + plots[plot].plotNumber)) hist_X0_elements[label].Add(prof_X0_elements[label].ProjectionX("B_%s" % prof_X0_elements[label].GetName()) , +1.000) @@ -539,7 +538,7 @@ def createCompoundPlots(detector, plot): plots[plot].abscissa, plots[plot].ordinate) stack_X0 = THStack("stack_X0", stackTitle); - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): stack_X0.Add(hist_X0_elements[label]) # canvas @@ -559,7 +558,7 @@ def createCompoundPlots(detector, plot): if plot == "x_vs_phi" or plot == "l_vs_phi": theLegend = TLegend(0.65, 0.30, 0.89, 0.70) if plot == "x_vs_R" or plot == "l_vs_R": theLegend = TLegend(0.75, 0.60, 0.95, 0.90) - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): theLegend.AddEntry(hist_X0_elements[label], leg, "f") theLegend.Draw(); @@ -981,17 +980,17 @@ def GetSiliconZValuesFromXML(): #First I loop through labels to put the hide button in twiki #All HGCal print("---+++ Results: Plots for individual material in all HGCal") - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): for p in ["x_vs_z_vs_Rsum", "l_vs_z_vs_Rsum", "x_vs_z_vs_Rsumcos", "l_vs_z_vs_Rsumcos", "x_vs_z_vs_Rloc", "l_vs_z_vs_Rloc"]: TwikiPrintout(p, leg, "all") #Z+ print("---+++ Results: Plots for individual material in Z+ Endcap") - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): for p in ["x_vs_z_vs_Rsum", "l_vs_z_vs_Rsum", "x_vs_z_vs_Rsumcos", "l_vs_z_vs_Rsumcos", "x_vs_z_vs_Rloc", "l_vs_z_vs_Rloc"]: TwikiPrintout(p, leg, "zplus") #Z- print("---+++ Results: Plots for individual material in Z- Endcap") - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): for p in ["x_vs_z_vs_Rsum", "l_vs_z_vs_Rsum", "x_vs_z_vs_Rsumcos", "l_vs_z_vs_Rsumcos", "x_vs_z_vs_Rloc", "l_vs_z_vs_Rloc"]: TwikiPrintout(p, leg, "zminus") @@ -1003,7 +1002,7 @@ def GetSiliconZValuesFromXML(): #First the total create2DPlots(args.detector, p, plots[p].plotNumber, "") #Then, the rest - for label, [num, color, leg] in six.iteritems(hist_label_to_num): + for label, [num, color, leg] in hist_label_to_num.items(): #print label, num, color, leg create2DPlots(args.detector, p, num + plots[p].plotNumber, leg) diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index 56ca9dab0356e..14157ce8c1b79 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -4,7 +4,6 @@ import copy import collections -import six import ROOT from ROOT import TFile, TString from ROOT import gDirectory diff --git a/Validation/HGCalValidation/python/html.py b/Validation/HGCalValidation/python/html.py index aa31edf54909e..2e41c5483d750 100644 --- a/Validation/HGCalValidation/python/html.py +++ b/Validation/HGCalValidation/python/html.py @@ -1,6 +1,5 @@ import os import collections -import six _sampleName = { "RelValCloseByParticleGun_CE_H_Fine_300um" : "CloseByParticleGun in CE-H Fine section with 300 um", diff --git 
a/Validation/HGCalValidation/test/python/digiValidation_cfg.py b/Validation/HGCalValidation/test/python/digiValidation_cfg.py index 1298a4b39aa2f..b3bd63e9ef0c1 100644 --- a/Validation/HGCalValidation/test/python/digiValidation_cfg.py +++ b/Validation/HGCalValidation/test/python/digiValidation_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six from Configuration.Eras.Era_Phase2C9_cff import Phase2C9 process = cms.Process('testHGCalDigiLocal',Phase2C9) @@ -119,7 +118,7 @@ for path in process.paths: getattr(process,path)._seq = process.generator * getattr(process,path)._seq -for label, prod in six.iteritems(process.producers_()): +for label, prod in process.producers_().items(): if prod.type_() == "OscarMTProducer": # ugly hack prod.__dict__['_TypedParameterizable__type'] = "OscarProducer" diff --git a/Validation/HGCalValidation/test/python/rechitValidation_cfg.py b/Validation/HGCalValidation/test/python/rechitValidation_cfg.py index 72fce599744d7..3991f9c2dc318 100644 --- a/Validation/HGCalValidation/test/python/rechitValidation_cfg.py +++ b/Validation/HGCalValidation/test/python/rechitValidation_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six from Configuration.Eras.Era_Phase2C9_cff import Phase2C9 process = cms.Process('testHGCalRECLocal',Phase2C9) @@ -134,7 +133,7 @@ getattr(process,path)._seq = process.generator * getattr(process,path)._seq # End of customisation functions -for label, prod in six.iteritems(process.producers_()): +for label, prod in process.producers_().items(): if prod.type_() == "OscarMTProducer": # ugly hack prod.__dict__['_TypedParameterizable__type'] = "OscarProducer" diff --git a/Validation/HGCalValidation/test/python/simHitValidation_cfg.py b/Validation/HGCalValidation/test/python/simHitValidation_cfg.py index 5c659988f1f57..c99c49a42712f 100644 --- a/Validation/HGCalValidation/test/python/simHitValidation_cfg.py +++ b/Validation/HGCalValidation/test/python/simHitValidation_cfg.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six from Configuration.Eras.Era_Phase2C9_cff import Phase2C9 process = cms.Process('testHGCalSIMLocal',Phase2C9) @@ -85,7 +84,7 @@ process.g4SimHits.Physics.DefaultCutValue = 0.1 process.g4SimHits.HCalSD.TestNumberingScheme = True -for label, prod in six.iteritems(process.producers_()): +for label, prod in process.producers_().items(): if prod.type_() == "OscarMTProducer": # ugly hack prod.__dict__['_TypedParameterizable__type'] = "OscarProducer" diff --git a/Validation/RecoB/scripts/cuy.py b/Validation/RecoB/scripts/cuy.py index 6dd886f25db72..856872a9f688e 100755 --- a/Validation/RecoB/scripts/cuy.py +++ b/Validation/RecoB/scripts/cuy.py @@ -12,7 +12,6 @@ # # imported from UserCode/Yumiceva/cuy from __future__ import print_function -import six # # modified by Adrien Caudron to create TGraphErrors for b-tag performance plots # UCLouvain, 2012 @@ -79,7 +78,7 @@ def nonzero(self): # will become the nonzero method of optparse.Values "True if options were given" - for v in six.itervalues(self.__dict__): + for v in self.__dict__.values(): if v is not None: return True return False diff --git a/Validation/RecoTau/python/RecoTauValidation_cfi.py b/Validation/RecoTau/python/RecoTauValidation_cfi.py index 8131a6c9c5318..374fe5cf8528b 100644 --- a/Validation/RecoTau/python/RecoTauValidation_cfi.py +++ b/Validation/RecoTau/python/RecoTauValidation_cfi.py @@ -11,7 +11,6 @@ """ -import six RecoTauValidation_cfi.py @@ -466,7 +465,7 @@ def ConvertDrawJobToLegacyCompare(input): if not 
hasattr(input, "drawJobs"): return myDrawJobs = input.drawJobs.parameters_() - for drawJobName, drawJobData in six.iteritems(myDrawJobs): + for drawJobName, drawJobData in myDrawJobs.items(): print(drawJobData) if not drawJobData.plots.pythonTypeName() == "cms.PSet": continue diff --git a/Validation/RecoTau/python/ValidationOptions_cff.py b/Validation/RecoTau/python/ValidationOptions_cff.py index dcc25bb88011f..cdb4da0be92e6 100644 --- a/Validation/RecoTau/python/ValidationOptions_cff.py +++ b/Validation/RecoTau/python/ValidationOptions_cff.py @@ -1,7 +1,6 @@ from __future__ import print_function import os import sys -import six try: ReleaseBase = os.path.join(os.environ['CMSSW_BASE'], "src") ReleaseVersion = os.environ['CMSSW_VERSION'] @@ -150,7 +149,7 @@ def checkOptionsForBadInput(): # Sanity check - for optionName, allowedValues in six.iteritems(allowedOptions): + for optionName, allowedValues in allowedOptions.items(): if not getattr(options, optionName) in allowedValues: print("Bad input to option: %s" % optionName) sys.exit() @@ -172,10 +171,10 @@ def CMSSWEnvironmentIsCurrent(): def returnOptionsString(): ''' format the options to be passed on the command line. Used when submitting batch jobs''' outputString = "" - for optionsName, optionValue in six.iteritems(options.__dict__['_singletons']): + for optionsName, optionValue in options.__dict__['_singletons'].items(): outputString += " %s=%s" % (optionsName, optionValue) - for optionsName, optionValues in six.iteritems(options.__dict__['_lists']): + for optionsName, optionValues in options.__dict__['_lists'].items(): for anOption in optionValues: outputString += " %s=%s" % (optionsName, anOption) return outputString diff --git a/Validation/RecoTau/python/compare.py b/Validation/RecoTau/python/compare.py index 9de88d634a16b..71a9b85070ba7 100644 --- a/Validation/RecoTau/python/compare.py +++ b/Validation/RecoTau/python/compare.py @@ -5,7 +5,6 @@ from array import array from ROOT import gROOT, gStyle, TH1F, TH1D, TF1, TFile, TCanvas, TH2F, TLegend, TGraphAsymmErrors, Double, TLatex import os, copy, sys -import six gROOT.SetBatch(True) officialStyle(gStyle) @@ -314,11 +313,11 @@ def makeEffPlotsVars(tree, varx, vary, sel, nbinx, xmin, xmax, nbiny, ymin, ymax '7_6_1_v3':{'file':'Myroot_7_6_1_v3_' + runtype + '.root', 'col':3, 'marker':23, 'width':1}, } - for hname, hdict in sorted(six.iteritems(vardict)): + for hname, hdict in sorted(vardict.items()): hists = [] - for rel, rdict in sorted(six.iteritems(sampledict)): + for rel, rdict in sorted(sampledict.items()): if rel.find('7_6_1')==-1 and (hname.find('MVA6')!=-1 or hname.find('MVArun2')!=-1): continue @@ -361,14 +360,14 @@ def makeEffPlotsVars(tree, varx, vary, sel, nbinx, xmin, xmax, nbiny, ymin, ymax } - for hname, hdict in sorted(six.iteritems(hvardict)): + for hname, hdict in sorted(hvardict.items()): hists = [] if runtype != 'ZTT' and hname.find('pt_resolution')!=-1: continue - for rel, rdict in sorted(six.iteritems(sampledict)): + for rel, rdict in sorted(sampledict.items()): tfile = TFile(rdict['file']) tree = tfile.Get('per_tau') diff --git a/Validation/RecoTrack/python/TrackValidation_cff.py b/Validation/RecoTrack/python/TrackValidation_cff.py index b0a933365082e..5190b7cd4328f 100644 --- a/Validation/RecoTrack/python/TrackValidation_cff.py +++ b/Validation/RecoTrack/python/TrackValidation_cff.py @@ -23,7 +23,6 @@ import RecoTracker.IterativeTracking.iterativeTkConfig as _cfg import RecoTracker.IterativeTracking.iterativeTkUtils as _utils from 
Configuration.Eras.Modifier_fastSim_cff import fastSim -import six ### First define the stuff for the standard validation sequence ## Track selectors @@ -184,7 +183,7 @@ def _taskForEachEra(function, args, names, task, modDict, plainArgs=[], modifyTa _era.toReplaceWith(defaultTask, modDict[task+_postfix]) def _setForEra(module, eraName, era, **kwargs): if eraName == "": - for key, value in six.iteritems(kwargs): + for key, value in kwargs.items(): setattr(module, key, value) else: era.toModify(module, **kwargs) @@ -244,7 +243,7 @@ def _getMVASelectors(postfix): mvaSel = _utils.getMVASelectors(postfix) pset = cms.untracked.PSet() - for iteration, (trackProducer, classifiers) in six.iteritems(mvaSel): + for iteration, (trackProducer, classifiers) in mvaSel.items(): setattr(pset, trackProducer, cms.untracked.vstring(classifiers)) return pset for _eraName, _postfix, _era in _relevantEras: diff --git a/Validation/RecoTrack/python/customiseTrackingNtuple.py b/Validation/RecoTrack/python/customiseTrackingNtuple.py index 2f1e07ffb123e..2ff31682e9154 100644 --- a/Validation/RecoTrack/python/customiseTrackingNtuple.py +++ b/Validation/RecoTrack/python/customiseTrackingNtuple.py @@ -1,5 +1,4 @@ import FWCore.ParameterSet.Config as cms -import six def _label(tag): if hasattr(tag, "getModuleLabel"): @@ -71,7 +70,7 @@ def customiseTrackingNtupleTool(process, isRECO = True, mergeIters = False): # remove the validation_stepN and prevalidatin_stepN of phase2 validation... for p in [process.paths_(), process.endpaths_()]: - for pathName, path in six.iteritems(p): + for pathName, path in p.items(): if "prevalidation_step" in pathName: if len(pathName.replace("prevalidation_step", "")) > 0: modifier.toReplaceWith(path, cms.Path()) @@ -80,10 +79,10 @@ def customiseTrackingNtupleTool(process, isRECO = True, mergeIters = False): modifier.toReplaceWith(path, cms.EndPath()) # Remove all output modules - for outputModule in six.itervalues(process.outputModules_()): - for path in six.itervalues(process.paths_()): + for outputModule in process.outputModules_().values(): + for path in process.paths_().values(): path.remove(outputModule) - for path in six.itervalues(process.endpaths_()): + for path in process.endpaths_().values(): path.remove(outputModule) diff --git a/Validation/RecoTrack/python/plotting/html.py b/Validation/RecoTrack/python/plotting/html.py index 41289cc835054..1bd245bb73b89 100644 --- a/Validation/RecoTrack/python/plotting/html.py +++ b/Validation/RecoTrack/python/plotting/html.py @@ -1,6 +1,5 @@ import os import collections -import six def _lowerFirst(s): return s[0].lower()+s[1:] @@ -374,7 +373,7 @@ def addTable(self, section, table): self._tables[section] = table def isEmpty(self): - for plotSet in six.itervalues(self._plotSets): + for plotSet in self._plotSets.values(): if len(plotSet) > 0: return False diff --git a/Validation/RecoTrack/python/plotting/ntupleDataFormat.py b/Validation/RecoTrack/python/plotting/ntupleDataFormat.py index f3918c513ef5e..4e9668036de3a 100644 --- a/Validation/RecoTrack/python/plotting/ntupleDataFormat.py +++ b/Validation/RecoTrack/python/plotting/ntupleDataFormat.py @@ -6,7 +6,6 @@ from Validation.RecoTrack.plotting.ntupleEnum import * from Validation.RecoTrack.plotting.ntupleEnum import _Enum -import six class _Collection(object): """Adaptor class representing a collection of objects. 
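Editor's note on the itervalues()/items() replacements in the tracking customisation and plotting helpers above: Python 3 dict views iterate lazily, just like the removed six helpers, but a dict must not change size while one of its views is being walked. None of the loops touched here resize the dict they iterate (for instance, path.remove(outputModule) in customiseTrackingNtuple.py modifies the Path object, not the outputModules_() dictionary), so using the view directly is safe. A minimal sketch with made-up data, showing the safe pattern and the one case where a snapshot would be needed:

    counts = {"hlt": 0, "reco": 3, "validation": 1}   # made-up example data

    for value in counts.values():    # read-only iteration, equivalent to the old six.itervalues()
        print(value)

    for key in list(counts):         # copy the keys first when the loop deletes entries
        if counts[key] == 0:
            del counts[key]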
@@ -1020,7 +1019,7 @@ def bestMatchingTrack(self): tracks[track.index()] = 1 best = (None, 2) - for trackIndex, nhits in six.iteritems(tracks): + for trackIndex, nhits in tracks.items(): if nhits > best[1]: best = (trackIndex, nhits) if best[0] is None: diff --git a/Validation/RecoTrack/python/plotting/ntupleEnum.py b/Validation/RecoTrack/python/plotting/ntupleEnum.py index 526f513e0755d..167e5cdc91130 100644 --- a/Validation/RecoTrack/python/plotting/ntupleEnum.py +++ b/Validation/RecoTrack/python/plotting/ntupleEnum.py @@ -1,9 +1,8 @@ # Poor-man enum class with string conversion class _Enum: def __init__(self, **values): - import six self._reverse = {} - for key, value in six.iteritems(values): + for key, value in values.items(): setattr(self, key, value) if value in self._reverse: raise Exception("Value %s is already used for a key %s, tried to re-add it for key %s" % (value, self._reverse[value], key)) diff --git a/Validation/RecoTrack/python/plotting/ntuplePlotting.py b/Validation/RecoTrack/python/plotting/ntuplePlotting.py index cca142e05446d..f05956f853205 100644 --- a/Validation/RecoTrack/python/plotting/ntuplePlotting.py +++ b/Validation/RecoTrack/python/plotting/ntuplePlotting.py @@ -4,7 +4,6 @@ import ROOT import Validation.RecoTrack.plotting.plotting as plotting -import six def saveHistograms(tdirectory, histos): for h in histos: @@ -130,7 +129,7 @@ def drawMany(name, histoDicts, styles=_defaultStyles, opts={}, ncolumns=4): histoNames = histoDicts[0].keys() ratio = False ratioFactor = _ratioFactor - for opt in six.itervalues(opts): + for opt in opts.values(): if "ratio" in opt: ratio = True if "ratioFactor" in opt: @@ -150,7 +149,7 @@ def drawMany(name, histoDicts, styles=_defaultStyles, opts={}, ncolumns=4): histos = collections.defaultdict(list) for d in histoDicts: - for n, h in six.iteritems(d): + for n, h in d.items(): histos[n].append(h) for i, histoName in enumerate(histoNames): diff --git a/Validation/RecoTrack/python/plotting/ntuplePrintersDiff.py b/Validation/RecoTrack/python/plotting/ntuplePrintersDiff.py index fa45abfb08945..3955f5570bec1 100644 --- a/Validation/RecoTrack/python/plotting/ntuplePrintersDiff.py +++ b/Validation/RecoTrack/python/plotting/ntuplePrintersDiff.py @@ -9,7 +9,6 @@ from operator import itemgetter, methodcaller from Validation.RecoTrack.plotting.ntupleDataFormat import * -import six # Common track-track matching by hits (=clusters) def _commonHits(trk1, trk2): @@ -77,7 +76,7 @@ def match(self, trk): tracks[ot] += 1 best = (None, 0) - for t, ncommon in six.iteritems(tracks): + for t, ncommon in tracks.items(): if ncommon > best[1]: best = (t, ncommon) return best @@ -486,7 +485,7 @@ def _getOrCreateAssoc(trk, d, **kwargs): # merge results # any good way to avoid copy-past? 
- for ind, assoc in six.iteritems(trkAssoc1): + for ind, assoc in trkAssoc1.items(): for t1 in assoc.trks1(): a = trkAssoc1[t1.index()] assoc.merge(a) @@ -495,7 +494,7 @@ def _getOrCreateAssoc(trk, d, **kwargs): a = trkAssoc2[t2.index()] assoc.merge(a) a.merge(assoc) - for ind, assoc in six.iteritems(trkAssoc2): + for ind, assoc in trkAssoc2.items(): for t2 in assoc.trks2(): a = trkAssoc2[t2.index()] assoc.merge(a) @@ -505,7 +504,7 @@ def _getOrCreateAssoc(trk, d, **kwargs): assoc.merge(a) a.merge(assoc) - for ind, assoc in itertools.chain(six.iteritems(trkAssoc1), six.iteritems(trkAssoc2)): + for ind, assoc in itertools.chain(trkAssoc1.items(), trkAssoc2.items()): #if ind in [437, 1101]: # print "----" # print ind, [t.index() for t in assoc.trks1()], [t.index() for t in assoc.trks2()] diff --git a/Validation/RecoTrack/python/plotting/plotting.py b/Validation/RecoTrack/python/plotting/plotting.py index eff7bab9f34bb..2002766fd0b7e 100644 --- a/Validation/RecoTrack/python/plotting/plotting.py +++ b/Validation/RecoTrack/python/plotting/plotting.py @@ -9,7 +9,6 @@ import difflib import collections -import six import ROOT ROOT.gROOT.SetBatch(True) ROOT.PyConfig.IgnoreCommandLineOptions = True @@ -999,7 +998,7 @@ def create(self, tdirectory): values = _th1ToOrderedDict(th1, self._renameBin) binIndexOrder = [] # for reordering bins if self._originalOrder is True - for i, (key, labels) in enumerate(six.iteritems(self._mapping)): + for i, (key, labels) in enumerate(self._mapping.items()): sumTime = 0. sumErrorSq = 0. nsum = 0 @@ -1096,7 +1095,7 @@ def __str__(self): def create(self, tdirectory): """Create and return the histogram from a TDirectory""" result = [] - for key, histoName in six.iteritems(self._mapping): + for key, histoName in self._mapping.items(): th1 = _getObject(tdirectory, histoName) if th1 is None: continue @@ -1830,7 +1829,7 @@ def _set(attr, default): self._histograms = [] def setProperties(self, **kwargs): - for name, value in six.iteritems(kwargs): + for name, value in kwargs.items(): if not hasattr(self, "_"+name): raise Exception("No attribute '%s'" % name) setattr(self, "_"+name, value) @@ -2291,7 +2290,7 @@ def _set(attr, default): self._ratioFactor = 1.25 def setProperties(self, **kwargs): - for name, value in six.iteritems(kwargs): + for name, value in kwargs.items(): if not hasattr(self, "_"+name): raise Exception("No attribute '%s'" % name) setattr(self, "_"+name, value) diff --git a/Validation/RecoTrack/python/plotting/trackingPlots.py b/Validation/RecoTrack/python/plotting/trackingPlots.py index 7609932d5abef..45a120b3be0f7 100644 --- a/Validation/RecoTrack/python/plotting/trackingPlots.py +++ b/Validation/RecoTrack/python/plotting/trackingPlots.py @@ -4,7 +4,6 @@ import copy import collections -import six import ROOT ROOT.gROOT.SetBatch(True) ROOT.PyConfig.IgnoreCommandLineOptions = True @@ -632,7 +631,7 @@ def testColl(coll): break # next try "old style" if algo is None: - for coll, name in six.iteritems(_possibleTrackingCollsOld): + for coll, name in _possibleTrackingCollsOld.items(): if testColl(coll.lower()): algo = name break @@ -962,7 +961,7 @@ def getSection(self, dqmSubFolder): def create(self, tdirectory): def _getAlgoQuality(data, algo, quality): - for label, value in six.iteritems(data): + for label, value in data.items(): (a, q) = _mapCollectionToAlgoQuality(label) if a == algo and q == quality: return value[0] # value is (value, uncertainty) tuple diff --git a/Validation/RecoTrack/test/fakeAnalysis/analysis.py 
b/Validation/RecoTrack/test/fakeAnalysis/analysis.py index 115429ac785b2..9fba01be7d8cb 100644 --- a/Validation/RecoTrack/test/fakeAnalysis/analysis.py +++ b/Validation/RecoTrack/test/fakeAnalysis/analysis.py @@ -10,7 +10,6 @@ from math import sqrt, copysign, sin, cos, pi from Validation.RecoTrack.plotting.ntuple import * -import six ##### GLOBAL VARIABLES ##### @@ -1161,17 +1160,17 @@ def Save_Normalisation_Coefficients(ntuple_file): ''' norm_c = copy(layer_data_tmp) - print(sum([val for ind, val in six.iteritems(norm_c)])) + print(sum([val for ind, val in norm_c.items()])) for event in ntuple_file: print(event.entry()+1) for particle in event.trackingParticles(): for hit in particle.hits(): if hit.isValidHit(): norm_c[layer_names_rev[hit.layerStr()]] += 1 - norm_sum = sum([val for ind, val in six.iteritems(norm_c)]) + norm_sum = sum([val for ind, val in norm_c.items()]) print(norm_sum) print(norm_c) - for i, c in six.iteritems(norm_c): + for i, c in norm_c.items(): norm_c[i] = 1.0*c/norm_sum #normalisation = [1.0*c/norm_sum for c in norm_c] print("normalisation_coefficients.dmp") diff --git a/Validation/RecoTrack/test/fakeAnalysis/graphics.py b/Validation/RecoTrack/test/fakeAnalysis/graphics.py index 70ce9e270b817..fdaaa7850e85d 100644 --- a/Validation/RecoTrack/test/fakeAnalysis/graphics.py +++ b/Validation/RecoTrack/test/fakeAnalysis/graphics.py @@ -11,7 +11,6 @@ import analysis from math import sqrt, copysign, sin, cos, pi -import six class EventPlotter(object): ''' @@ -678,7 +677,7 @@ def EndOfTrackingHistogram(self, end_list, hist_type="end_class", end_mask = [], if normalised: norm_cff = analysis.Get_Normalisation_Coefficients() - for i, v in six.iteritems(data_dict): + for i, v in data_dict.items(): data_dict[i] = int(round(v*norm_cff[i])) name = "" diff --git a/Validation/Tools/python/GenObject.py b/Validation/Tools/python/GenObject.py index d064f7e120bce..6aeb53b468316 100755 --- a/Validation/Tools/python/GenObject.py +++ b/Validation/Tools/python/GenObject.py @@ -15,7 +15,6 @@ import sys import inspect import ROOT -import six from functools import reduce ROOT.gROOT.SetBatch() @@ -701,7 +700,7 @@ def _genObjectClone (objName, tupleName, obj, index = -1): genObj = GenObject (objName) origObj = obj if debug: warn (objName, spaces = 9) - for genVar, ntDict in six.iteritems(tofillObjDict): + for genVar, ntDict in tofillObjDict.items(): if debug: warn (genVar, spaces = 12) # lets work our way down the list partsList = ntDict[0] @@ -847,7 +846,7 @@ def setupDiffOutputTree (outputFile, diffName, missingName, @staticmethod def _fillRootObjects (event): """Fills root objects from GenObject 'event'""" - for objName, obj in sorted (six.iteritems(event)): + for objName, obj in sorted (event.items()): if GenObject.isSingleton (objName): # Just one GenObject._rootObjectCopy (obj, @@ -953,13 +952,13 @@ def loadEventFromTree (eventTree, eventIndex, def printEvent (event): """Prints out event dictionary. Mostly for debugging""" # Print out all singletons first - for objName, obj in sorted (six.iteritems(event)): + for objName, obj in sorted (event.items()): #obj = event[objName] # is this a singleton? if GenObject.isSingleton (objName): print("%s: %s" % (objName, obj)) # Now print out all vectors - for objName, obj in sorted (six.iteritems(event)): + for objName, obj in sorted (event.items()): #obj = event[objName] # is this a singleton? 
if not GenObject.isSingleton (objName): @@ -974,7 +973,7 @@ def printEvent (event): def setAliases (eventTree, tupleName): """runs SetAlias on all saved aliases""" aliases = GenObject._ntupleDict[tupleName].get('_alias', {}) - for name, alias in six.iteritems(aliases): + for name, alias in aliases.items(): eventTree.SetAlias (name, alias) @@ -1552,7 +1551,7 @@ def __init__ (self, objName): raise RuntimeError("Failed to create GenObject object.") self._localObjsDict = GenObject._objsDict [objName] self._objName = objName; - for key, varDict in six.iteritems(self._localObjsDict): + for key, varDict in self._localObjsDict.items(): # if the key starts with an '_', then it is not a # variable, so don't treat it as one. if key.startswith ("_"): @@ -1623,7 +1622,7 @@ def __call__ (self, key): def __str__ (self): """String representation""" retval = "" - for varName, value in sorted (six.iteritems(self.__dict__)): + for varName, value in sorted (self.__dict__.items()): if varName.startswith ('_'): continue form = self.getVariableProperty (varName, "form") if form: diff --git a/Validation/Tools/scripts/diffTreeTool.py b/Validation/Tools/scripts/diffTreeTool.py index a145a2e2e62f8..b2e3501756fc5 100755 --- a/Validation/Tools/scripts/diffTreeTool.py +++ b/Validation/Tools/scripts/diffTreeTool.py @@ -6,7 +6,6 @@ import os import re from pprint import pprint -import six epsilon = 1.e-4 @@ -21,7 +20,7 @@ def getDictFromObject (obj, varDict, prefix = ''): if prefix: obj = getPieceFromObject (obj, prefix) retval = {} - for key, description in six.iteritems(varDict): + for key, description in varDict.items(): retval[key] = getPieceFromObject (obj, description) return retval @@ -106,7 +105,7 @@ def format (objDict, label, spacing=9, firstOnly = False): " class at a time.") name = match.group(1) continue - for key, regexTuple in six.iteritems(variableREDict): + for key, regexTuple in variableREDict.items(): if regexTuple[0].search(line): typeFoundSet.add( key ) continue diff --git a/Validation/Tools/scripts/runEdmFileComparison.py b/Validation/Tools/scripts/runEdmFileComparison.py index 920088465a867..cbe96e03b51c1 100755 --- a/Validation/Tools/scripts/runEdmFileComparison.py +++ b/Validation/Tools/scripts/runEdmFileComparison.py @@ -7,7 +7,6 @@ import re import os import sys -import six piecesRE = re.compile (r'(.+?)\s+"(\S+)"\s+"(\S*)"\s+"(\S+)"') #colonRE = re.compile (r':+') @@ -238,7 +237,7 @@ def label (self): ######################################### ## Run useReflexToDescribeForGenObject ## ######################################### - for key, value in sorted (six.iteritems(collection)): + for key, value in sorted (collection.items()): name = value[0].name prettyName = nonAlphaRE.sub('', name) descriptionName = prettyName + '.txt' @@ -279,7 +278,7 @@ def label (self): ################################## ## Run edmOneToOneComparison.py ## ################################## - for key, value in sorted (six.iteritems(collection)): + for key, value in sorted (collection.items()): #print "%-40s" % key, for obj in value: # print " ", obj.label(), diff --git a/Validation/Tools/scripts/summarizeEdmComparisonLogfiles.py b/Validation/Tools/scripts/summarizeEdmComparisonLogfiles.py index e03acb049ae63..17a2471282bbd 100755 --- a/Validation/Tools/scripts/summarizeEdmComparisonLogfiles.py +++ b/Validation/Tools/scripts/summarizeEdmComparisonLogfiles.py @@ -6,7 +6,6 @@ from glob import glob import re import pprint -import six import commands countRE = re.compile (r'^count_(\w+)') avoid = ['index', 'print'] @@ -19,7 
+18,7 @@ def summaryOK (summary): compared = summary.get('eventsCompared', -1) if len( summary) != 2: retval = False - for key,value in six.iteritems(summary): + for key,value in summary.items(): if countRE.search(key): count = value return (retval, {'count':count, 'compared':compared}) @@ -148,7 +147,7 @@ def summaryOK (summary): if success1RE.search (line) or success2RE.search(line): success = True continue - for key, regex in six.iteritems(problemDict): + for key, regex in problemDict.items(): #print "considering %s for %s" % (key, line) if regex.search(line): if key in problemSet: @@ -211,7 +210,7 @@ def summaryOK (summary): print("weird: ", weird) print("Tool issue types:") total = 0 - for key, value in sorted (six.iteritems(problemTypes)): + for key, value in sorted (problemTypes.items()): print(" %-15s: %4d" % (key, value)) total += value print(" ", '-'*13, " : ----") @@ -221,7 +220,7 @@ def summaryOK (summary): if not options.counts: print("\nDetailed Problems list:") - for key, problemList in sorted (six.iteritems(problems)): + for key, problemList in sorted (problems.items()): if options.problem and problemList[0] != options.problem: continue if options.mismatch and not isinstance (problemList, str): @@ -236,5 +235,5 @@ def summaryOK (summary): if not options.problem and not options.mismatch: print("\n", '='*78, '\n') print("Success list:") - for key, successesList in sorted (six.iteritems(successes)): + for key, successesList in sorted (successes.items()): print(" %s:\n %s\n" % (key, successesList))
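Editor's summary (illustrative only, not part of the patch): every change above follows the same mechanical mapping from the six compatibility helpers to the Python 3 built-ins. A self-contained sketch with a made-up dictionary:

    import itertools

    d1 = {"a": 1, "b": 2}                      # made-up example data
    d2 = {"c": 3}

    for key, value in d1.items():              # six.iteritems(d1)  -> d1.items()
        print(key, value)

    total = sum(d1.values())                   # six.itervalues(d1) -> d1.values()

    for key, value in sorted(d1.items()):      # sorted(six.iteritems(d1)) -> sorted(d1.items())
        print(key, value)

    for key, value in itertools.chain(d1.items(), d2.items()):
        print(key, value)                      # chained iteration, as in ntuplePrintersDiff.py

In Python 3 these calls return lightweight views rather than lists, so the replacements keep the memory behaviour of the old iterator helpers.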