Format Python code with black 23.1.0
with manual fixes to 3 files to make them compatible with black 22.12.0.

We need to keep black pinned at `<23` until we vendorise `packaging`, since
the new version of black depends on `packaging >=22.0` which is the version
where `LegacyVersion` (which we use for tool lineages) was removed.
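
For context, `LegacyVersion` is what `packaging.version.parse` falls back to for non-PEP 440 version strings before packaging 22.0. A minimal sketch of the behavior the pin preserves (illustrative version strings; assumes packaging < 22 is installed):

# Illustrative sketch -- requires packaging < 22, where LegacyVersion still exists.
from packaging.version import LegacyVersion, parse

# PEP 440-compliant version strings parse to Version objects.
assert type(parse("23.1.0")).__name__ == "Version"

# Non-PEP 440 strings (e.g. changeset-style tool versions) fall back to
# LegacyVersion under packaging < 22; packaging >= 22 removed LegacyVersion,
# and parse() raises InvalidVersion for such strings instead.
v = parse("e7b2cb0a4c91")  # hypothetical changeset-style version
assert isinstance(v, LegacyVersion)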
nsoranzo committed Feb 1, 2023
1 parent ac92d0d commit 30f6049
Showing 195 changed files with 63 additions and 297 deletions.
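
Two mechanical style changes account for most of the diff: black 23.1.0 removes blank lines directly after a block opener (`def`, `class`, `if`, `try`, ...), and it strips redundant parentheses around tuple targets in `for` loops. A small illustrative before/after (not taken from the diff):

# Before (black < 23):
def summarize(pairs):

    for (key, value) in pairs:
        print(key, value)


# After (black 23.1.0): the blank line after the block opener is gone and
# the parentheses around the for-loop tuple target are removed.
def summarize(pairs):
    for key, value in pairs:
        print(key, value)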
2 changes: 0 additions & 2 deletions config/plugins/webhooks/demo/tool_list/__init__.py
@@ -9,15 +9,13 @@ def main(trans, webhook, params):
     tools = trans.app.toolbox.tools()

     for tool in tools:
-
         try:
             ts_data = tool[1].tool_shed_repository.to_dict()
             panel = tool[1].get_panel_section()
         except AttributeError:
             continue

         if ts_data["name"] + ts_data["installed_changeset_revision"] not in unique_tools:
-
             unique_tools.append(ts_data["name"] + ts_data["installed_changeset_revision"])

             data["tools"].append(
2 changes: 1 addition & 1 deletion contrib/galaxy_config_merger.py
@@ -47,7 +47,7 @@ def main():
         if not config_sample.has_section(section):
             logging.warning("-MISSING- section [%s] not found in sample file. It will be ignored.", section)
         else:
-            for (name, value) in config.items(section):
+            for name, value in config.items(section):
                 if not config_sample.has_option(section, name):
                     if f"#{name}" not in config_sample_content:
                         logging.warning(
6 changes: 3 additions & 3 deletions lib/galaxy/actions/library.py
@@ -213,7 +213,7 @@ def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, respon
         (files_and_folders, _response_code, _message) = self._get_path_files_and_folders(params, preserve_dirs)
         if _response_code:
             return (uploaded_datasets, _response_code, _message)
-        for (path, name, folder) in files_and_folders:
+        for path, name, folder in files_and_folders:
             uploaded_datasets.append(
                 self._make_library_uploaded_dataset(trans, params, name, path, "path_paste", library_bunch, folder)
             )
@@ -224,7 +224,7 @@ def _get_path_files_and_folders(self, params, preserve_dirs):
         if problem_response:
             return problem_response
         files_and_folders = []
-        for (line, path) in self._paths_list(params):
+        for line, path in self._paths_list(params):
             line_files_and_folders = self._get_single_path_files_and_folders(line, path, preserve_dirs)
             files_and_folders.extend(line_files_and_folders)
         return files_and_folders, None, None
@@ -257,7 +257,7 @@ def _check_path_paste_params(self, params):
             response_code = 400
             return None, response_code, message
         bad_paths = []
-        for (_, path) in self._paths_list(params):
+        for _, path in self._paths_list(params):
             if not os.path.exists(path):
                 bad_paths.append(path)
         if bad_paths:
1 change: 0 additions & 1 deletion lib/galaxy/auth/providers/pam_auth.py
@@ -55,7 +55,6 @@


 class PAM(AuthProvider):
-
     plugin_type = "PAM"

     def authenticate(self, email, username, password, options, request):
1 change: 0 additions & 1 deletion lib/galaxy/authnz/custos_authnz.py
@@ -264,7 +264,6 @@ def _create_oauth2_session(self, state=None, scope=None):
         return session

     def _fetch_token(self, oauth2_session, trans):
-
         if self.config.get("iam_client_secret"):
             # Custos uses the Keycloak client secret to get the token
             client_secret = self.config["iam_client_secret"]
1 change: 0 additions & 1 deletion lib/galaxy/celery/__init__.py
@@ -147,7 +147,6 @@ def decorate(func: Callable):
         @shared_task(**celery_task_kwd)
         @wraps(func)
         def wrapper(*args, **kwds):
-
             app = get_galaxy_app()
             assert app

1 change: 0 additions & 1 deletion lib/galaxy/config/__init__.py
@@ -412,7 +412,6 @@ def _set_alt_paths(self, option, *alt_paths):
         return path

     def _update_raw_config_from_kwargs(self, kwargs):
-
         type_converters: Dict[str, Callable[[Any], Union[bool, int, float, str]]] = {
             "bool": string_as_bool,
             "int": int,
1 change: 0 additions & 1 deletion lib/galaxy/datatypes/binary.py
@@ -327,7 +327,6 @@ def display_peek(self, dataset: "DatasetInstance") -> str:


 class DynamicCompressedArchive(CompressedArchive):
-
     compressed_format: str
     uncompressed_datatype_instance: Data

1 change: 0 additions & 1 deletion lib/galaxy/datatypes/converters/fasta_to_len.py
@@ -10,7 +10,6 @@


 def compute_fasta_length(fasta_file, out_file, keep_first_char, keep_first_word=False):
-
     infile = fasta_file
     keep_first_char = int(keep_first_char)

@@ -65,7 +65,6 @@ def __main__():
             except Exception:
                 name = "region_%i" % count
             try:
-
                 out.write("%s\t%i\t%i\t%s\t%i\t%s\n" % (region.chrom, region.start, region.end, name, 0, region.strand))
             except Exception:
                 skipped_lines += 1
@@ -15,7 +15,6 @@


 def main():
-
     # Read options, args.
     parser = optparse.OptionParser()
     parser.add_option("-c", "--chr-col", type="int", dest="chrom_col", default=1)
@@ -12,7 +12,6 @@


 def main():
-
     # Read options, args.
     parser = optparse.OptionParser()
     (options, args) = parser.parse_args()
2 changes: 2 additions & 0 deletions lib/galaxy/datatypes/dataproviders/decorators.py
@@ -94,6 +94,7 @@ def dataprovider_factory(name, settings=None):
         to __init__ arguments
     :type settings: dictionary
     """
+
     # TODO:?? use *args for settings allowing mulitple dictionaries
     # make a function available through the name->provider dispatch to parse query strings
     # callable like:
@@ -123,6+124,7 @@ def _parse_query_string_settings(query_kwargs, settings=None):
     Parse the values in `query_kwargs` from strings to the proper types
     listed in the same key in `settings`.
     """
+
     # TODO: this was a relatively late addition: review and re-think
     def list_from_query_string(s):
         # assume csv
2 changes: 0 additions & 2 deletions lib/galaxy/datatypes/goldenpath.py
@@ -125,7 +125,6 @@ class AGPFile:
     """

     def __init__(self, in_file):
-
         self._agp_version = "2.1"
         self._fname = os.path.abspath(in_file)

@@ -193,7 +192,6 @@ def _read_file(self):
     def _add_line(self, agp_line):
         # Perform validity checks if this is a new object
         if agp_line.obj != self._current_obj:
-
             # Check if we have already seen this object before
             if agp_line.obj in self._seen_objs:
                 raise AGPError(self.fname, agp_line.line_number, "object identifier out of order")
3 changes: 0 additions & 3 deletions lib/galaxy/datatypes/isa.py
@@ -108,7 +108,6 @@ def _get_main_file(self, dataset: "DatasetInstance") -> Optional[str]:
         isa_folder = self._get_isa_folder_path(dataset)

         if os.path.exists(isa_folder):
-
             # Get ISA archive older
             isa_files = os.listdir(isa_folder)

@@ -350,7 +349,6 @@ def __init__(self, **kwd):
     ################################################################

     def _make_investigation_instance(self, filename: str) -> "Investigation":
-
         # Parse ISA-Tab investigation file
         parser = isatab_meta.InvestigationParser()
         isa_dir = os.path.dirname(filename)
@@ -384,7 +382,6 @@ def __init__(self, **kwd):
     ################################################################

     def _make_investigation_instance(self, filename: str) -> "Investigation":
-
         # Parse JSON file
         with open(filename, newline="", encoding="utf8") as fp:
             isa = isajson.load(fp)
2 changes: 0 additions & 2 deletions lib/galaxy/datatypes/media.py
@@ -35,7 +35,6 @@ def ffprobe(path):


 class Audio(Binary):
-
     MetadataElement(
         name="duration",
         default=0,
@@ -90,7 +89,6 @@ def set_meta(self, dataset: "DatasetInstance", overwrite: bool = True, **kwd) ->


 class Video(Binary):
-
     MetadataElement(
         name="resolution_w",
         default=0,
1 change: 0 additions & 1 deletion lib/galaxy/datatypes/msa.py
@@ -246,7 +246,6 @@ def _write_part_stockholm_file(accumulated_lines):
         part_file.writelines(accumulated_lines)

     try:
-
         stockholm_records = _read_stockholm_records(input_files[0])
         stockholm_lines_accumulated = []
         for counter, stockholm_record in enumerate(stockholm_records, start=1):
1 change: 0 additions & 1 deletion lib/galaxy/datatypes/speech.py
@@ -41,7 +41,6 @@ class TextGrid(Text):
     )

     def sniff(self, filename: str) -> bool:
-
         with open(filename) as fd:
             text = fd.read(len(self.header))
             return text == self.header
2 changes: 0 additions & 2 deletions lib/galaxy/datatypes/tabular.py
@@ -1813,9 +1813,7 @@ def set_meta(
         for i, line in enumerate(dataset_fh):
             line = line.strip("\n")
             if line.startswith("#"):
-
                 if line.startswith("#h"):
-
                     column_headers = line.split("\t")[1:]
                 elif line.startswith("#f"):
                     cleaned_column_types = []
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/text.py
@@ -443,7 +443,7 @@ def _transform_dict_list_ids(dict_list):
             return []

         b_transform = {"rows": _transform_dict_list_ids, "columns": _transform_dict_list_ids}
-        for (m_name, b_name) in [
+        for m_name, b_name in [
            ("table_rows", "rows"),
            ("table_matrix_element_type", "matrix_element_type"),
            ("table_format", "format"),
2 changes: 0 additions & 2 deletions lib/galaxy/datatypes/util/maf_utilities.py
@@ -134,7 +134,6 @@ def __del__(self):

 # an object corresponding to a reference layered alignment
 class RegionAlignment:
-
     DNA_COMPLEMENT = maketrans("ACGTacgt", "TGCAtgca")
     MAX_SEQUENCE_SIZE = sys.maxsize  # Maximum length of sequence allowed

@@ -226,7 +225,6 @@ def __init__(self, start, end, species=None, temp_file_handler=None):


 class SplicedAlignment:
-
     DNA_COMPLEMENT = maketrans("ACGTacgt", "TGCAtgca")

     def __init__(self, exon_starts, exon_ends, species=None, temp_file_handler=None):
2 changes: 1 addition & 1 deletion lib/galaxy/files/sources/posix.py
@@ -44,7 +44,7 @@ def _list(self, path="/", recursive=True, user_context=None):
         if recursive:
             res: List[Dict[str, Any]] = []
             effective_root = self._effective_root(user_context)
-            for (p, dirs, files) in safe_walk(dir_path, allowlist=self._allowlist):
+            for p, dirs, files in safe_walk(dir_path, allowlist=self._allowlist):
                 rel_dir = os.path.relpath(p, effective_root)
                 to_dict = functools.partial(self._resource_info_to_dict, rel_dir, user_context=user_context)
                 res.extend(map(to_dict, dirs))
3 changes: 1 addition & 2 deletions lib/galaxy/job_execution/actions/post.py
@@ -402,7 +402,7 @@ def execute(cls, app, sa_session, action, job, replacement_dict, final_job_state
                 )
             else:
                 creating_jobs.append((input_dataset, input_dataset.dataset.creating_job))
-        for (input_dataset, creating_job) in creating_jobs:
+        for input_dataset, creating_job in creating_jobs:
             sa_session.refresh(creating_job)
             sa_session.refresh(input_dataset)
         for input_dataset in [
@@ -497,7 +496,6 @@ def _execute(cls, tag_handler, user, output, tags):


 class ActionBox:
-
     actions = {
         "RenameDatasetAction": RenameDatasetAction,
         "HideDatasetAction": HideDatasetAction,
3 changes: 1 addition & 2 deletions lib/galaxy/job_execution/output_collect.py
@@ -189,7 +189,6 @@ def collect_dynamic_outputs(


 class BaseJobContext(ModelPersistenceContext):
-
     max_discovered_files: Union[int, float]
     tool_provided_metadata: BaseToolProvidedMetadata
     job_working_directory: str
@@ -575,7 +574,7 @@ def discover_files(output_name, tool_provided_metadata, extra_file_collectors, j
                 JsonCollectedDatasetMatch(dataset, extra_file_collector, filename, path=path),
             )
         else:
-            for (match, collector) in walk_over_file_collectors(extra_file_collectors, job_working_directory, matchable):
+            for match, collector in walk_over_file_collectors(extra_file_collectors, job_working_directory, matchable):
                 yield DiscoveredFile(match.path, collector, match)

2 changes: 1 addition & 1 deletion lib/galaxy/job_execution/setup.py
@@ -222,7 +222,7 @@ def get_output_path(self, dataset):
         if getattr(dataset, "fake_dataset_association", False):
             return dataset.file_name
         assert dataset.id is not None, f"{dataset} needs to be flushed to find output path"
-        for (hda, dataset_path) in self.output_hdas_and_paths.values():
+        for hda, dataset_path in self.output_hdas_and_paths.values():
             if hda.id == dataset.id:
                 return dataset_path
         raise KeyError(f"Couldn't find job output for [{dataset}] in [{self.output_hdas_and_paths.values()}]")
13 changes: 0 additions & 13 deletions lib/galaxy/jobs/dynamic_tool_destination.py
@@ -416,7 +416,6 @@ def __validate_destination(cls, valid_rule: bool, app, return_bool: bool, rule:
             valid_rule = False
         elif isinstance(rule["destination"], dict):
             if "priority" in rule["destination"] and isinstance(rule["destination"]["priority"], dict):
-
                 for priority in rule["destination"]["priority"]:
                     if priority not in priority_list:
                         error = "Invalid priority '"
@@ -530,7 +529,6 @@ def __validate_bounds(cls, valid_rule, return_bool, rule, tool, counter):
             upper_bound = -1

         if upper_bound != -1 and lower_bound > upper_bound:
-
             error = f"lower_bound exceeds upper_bound for rule {str(counter)}"
             error += f" in '{str(tool)}'."
             if not return_bool:
@@ -850,11 +848,9 @@ def infinite_defaultdict():
                 valid_config = False

             elif isinstance(obj["default_destination"], dict):
-
                 if "priority" in obj["default_destination"] and isinstance(
                     obj["default_destination"]["priority"], dict
                 ):
-
                     for priority in obj["default_destination"]["priority"]:
                         if isinstance(obj["default_destination"]["priority"][priority], str):
                             priority_list.add(priority)
@@ -936,7 +932,6 @@ def infinite_defaultdict():

             if isinstance(curr, dict):
                 if "priority" in curr and isinstance(curr["priority"], str):
-
                     if curr["priority"] in priority_list:
                         new_config["users"][user]["priority"] = curr["priority"]
                     else:
@@ -980,7 +975,6 @@ def infinite_defaultdict():
             curr_tool_rules = []

             if curr is not None:
-
                 # in each tool, there should always be only 2 sub-categories:
                 # default_destination (not mandatory) and rules (mandatory)
                 if "default_destination" in curr:
@@ -998,16 +992,13 @@
                     else:
                         valid_config = False
                 elif isinstance(curr["default_destination"], dict):
-
                     if "priority" in curr["default_destination"] and isinstance(
                         curr["default_destination"]["priority"], dict
                     ):
-
                         for priority in curr["default_destination"]["priority"]:
                             destination = curr["default_destination"]["priority"][priority]
                             if priority in priority_list:
                                 if isinstance(destination, str):
-
                                     is_valid = validate_destination(
                                         app,
                                         destination,
@@ -1401,7 +1392,6 @@ def map_tool_to_destination(job, app, tool, user_email, test=False, path=None, j
     if fail_message is not None:
         destination = "fail"
     elif config is not None:
-
         # Get the default priority from the config if necessary.
         # If there isn't one, choose an arbitrary one as a fallback
        if "default_destination" in config:
@@ -1477,7 +1467,6 @@ def map_tool_to_destination(job, app, tool, user_email, test=False, path=None, j
                 if user_authorized:
                     matched = False
                     if rule["rule_type"] == "file_size":
-
                         # bounds comparisons
                         upper_bound = str_to_bytes(rule["upper_bound"])
                         lower_bound = str_to_bytes(rule["lower_bound"])
@@ -1491,7 +1480,6 @@ def map_tool_to_destination(job, app, tool, user_email, test=False, path=None, j
                             matched = True

                     elif rule["rule_type"] == "num_input_datasets":
-
                         # bounds comparisons
                         upper_bound = rule["upper_bound"]
                         lower_bound = rule["lower_bound"]
@@ -1504,7 +1492,6 @@ def map_tool_to_destination(job, app, tool, user_email, test=False, path=None, j
                             matched = True

                     elif rule["rule_type"] == "records":
-
                         # bounds comparisons
                         upper_bound = str_to_bytes(rule["upper_bound"])
                         lower_bound = str_to_bytes(rule["lower_bound"])