Commit 3029cb68 authored by Paulo Medeiros

Replace some formatting with f-strings

parent 5fdf0eda
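The pattern applied throughout this commit: printf-style `%` interpolation and `str.format()` calls are rewritten as f-string literals (available since Python 3.6), which place the interpolated expressions inline in the string. A minimal, hypothetical before/after sketch (names not taken from this codebase):

    name, elapsed = "dbscan", 1.234
    # Before: the values live far from the placeholders they fill
    msg = "Method '%s' took %.2f s" % (name, elapsed)
    msg = "Method '{}' took {:.2f} s".format(name, elapsed)
    # After: expressions sit inline; format specs still work after ":"
    msg = f"Method '{name}' took {elapsed:.2f} s"
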
@@ -16,7 +16,7 @@ from .commands_functions import (
 )
-class StoreDictKeyPair(argparse.Action):
+class StoreDictKeyPair(argparse.Action):  # pylint: disable=too-few-public-methods
     """Enable args="key1=val1, ..., keyN=valN" in command line args."""
     # Source: <https://stackoverflow.com/questions/29986185/

@@ -163,7 +163,7 @@ def run_clustering_on_df(
     method = config.general.clustering_method.lower()
     # Compute clustering using DBSCAN or HDBSCAN
     if method not in ["dbscan", "hdbscan", "rsl", "optics"]:
-        raise NotImplementedError('Method "{}" not available.'.format(method))
+        raise NotImplementedError(f'Method "{method}" not available.')
     if len(df.index) == 0:
         logger.warning("Dataframe has no rows")
         df["cluster_label"] = None

@@ -70,9 +70,8 @@ def cluster_obs_single_dtg(args):
     if args.savefig:
         # Create outdir at the beginning so users don't
         # waste time in case they can't save results
-        outdir = config.general.outdir / "{}_netatmoqc_cluster".format(
-            datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
-        )
+        now_as_str = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
+        outdir = config.general.outdir / f"{now_as_str}_netatmoqc_cluster"
         # Allow mkdir to raise eventual exceptions if cannot write to outdir
         outdir.mkdir(parents=True)

@@ -169,9 +168,9 @@ def select_stations(args):
     # Create outdir at the beginning so users don't
     # waste time in case they can't save results
-    outdir = config.general.outdir / "{}_netatmoqc_select".format(
-        datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
-    )
+    now_as_str = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
+    outdir = config.general.outdir / f"{now_as_str}_netatmoqc_select"
     # Allow mkdir to raise eventual exceptions if cannot write to outdir
     outdir.mkdir(parents=True)

@@ -440,9 +439,9 @@ def csv2obsoul(args):
    # Create outdir at the beginning so users don't
    # waste time in case they can't save results
-    outdir = config.general.outdir / "{}_netatmoqc_csv2obsoul".format(
-        datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
-    )
+    now_as_str = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
+    outdir = config.general.outdir / f"{now_as_str}_netatmoqc_csv2obsoul"
    # Allow mkdir to raise eventual exceptions if cannot write to outdir
    outdir.mkdir(parents=True)

......@@ -459,7 +458,7 @@ def csv2obsoul(args):
else:
raise NotImplementedError(
'Only csv files supported in "--selected-stations-fpath". '
"Received '%s'." % (args.selected_stations_fpath),
f"Received '{args.selected_stations_fpath}'."
)
netatmoqc_input2output(
@@ -497,9 +496,8 @@ def thin_data_from_csv_files(args):
     domain = Domain.construct_from_dict(config.domain)
-    outdir_prefix = config.general.outdir / "{}_netatmoqc_thin".format(
-        datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
-    )
+    now_as_str = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
+    outdir_prefix = config.general.outdir / f"{now_as_str}_netatmoqc_thin"
     # Parse input paths. Keep file paths as they are, and find csv files
     # recursively for paths that are directories.

@@ -543,7 +541,7 @@ def thin_data_from_csv_files(args):
 ######################################
 def _open_file_with_default_app(fpath):
     if platform.system() == "Windows":
-        os.startfile(fpath)
+        os.startfile(fpath)  # pylint: disable=no-member
     elif platform.system() == "Darwin":
         subprocess.call(("open", fpath))
     else:

@@ -65,7 +65,7 @@ class UndefinedValueType:
         if self._return_self_on_attr_error:
             return self
         raise AttributeError(
-            "'{}' object has no attribute '{}'".format(self.__class__.__name__, item)
+            f"'{self.__class__.__name__}' object has no attribute '{item}'"
         )
     def __copy__(self):

@@ -264,7 +264,7 @@ class _ConfigMetadataRegistry(ConfigDict):
         try:
             # key -> section.key.
             # "section" will be defined using a context manager
-            key = "{}.{}".format(section, key)
+            key = f"{section}.{key}"
         except NameError:
             pass

@@ -293,7 +293,7 @@ class _ConfigMetadataRegistry(ConfigDict):
             key (str): String such that the destination where the template
                 will be copied into is section.key.
         """
-        dest_key = "{}.{}".format(section, key)
+        dest_key = f"{section}.{key}"
         if "." in section:
             template_key, _, _dropped = section.rpartition(".")
             template_key = "_template." + template_key + "." + key

@@ -674,7 +674,7 @@ def _fill_defaults(raw, recog_configs=config_metadata, parent_keys=()):
         parsed[key] = _fill_defaults(user_value, metadata, all_keys)
     else:
         raise TypeError(
-            "Expected only _MetadataDict or ConfigDict, got %s" % (type(metadata))
+            f"Expected only _MetadataDict or ConfigDict, got {type(metadata)}"
         )
     return parsed

@@ -741,8 +741,7 @@ class ParsedConfig:
         if default is NoDefaultProvided:
             if self.global_default is NoDefaultProvided:
                 raise UndefinedConfigValueError(
-                    'Config has no value for "{}", '.format(item)
-                    + "and no default has been passed."
+                    f'Config has no value for "{item}", with no default passed.'
                 )
             logger.debug(

@@ -49,7 +49,7 @@ class Grid2D:
     def _validated_axis_info(axis):
         if not isinstance(axis, GridAxisConfig):
             raise TypeError(
-                "expected type 'GridAxisConfig', got '%s' instead" % (type(axis).__name__)
+                f"expected type 'GridAxisConfig', got '{type(axis).__name__}' instead"
             )
         return axis

@@ -34,7 +34,7 @@ class Dtg(datetime):
     Args:
         *args: Positional args passed to datetime.
-        *kwargs: Keyword args passed to datetime.
+        **kwargs: Keyword args passed to datetime.
     Attributes:
         cycle_length (str): The length of the assimilation window.

@@ -68,9 +68,9 @@ class Dtg(datetime):
         if len(args) > 0 and not isinstance(args[0], int):
             if not any(isinstance(args[0], c) for c in [str, datetime]):
                 raise TypeError(
-                    "Cannot convert input '{}' ".format(args[0])
-                    + "of type '{}' ".format(type(args[0]).__name__)
-                    + "to {}".format(cls.__name__)
+                    f"Cannot convert input '{args[0]}' "
+                    + f"of type '{type(args[0]).__name__}' "
+                    + f"to {cls.__name__}"
                 )
         if len(args) > 1:

......@@ -125,7 +125,7 @@ class Dtg(datetime):
tzinfo=new_instance.tzinfo,
)
if new_instance.as_datetime() != eqv_valid_instance.as_datetime():
msg = "Dtg {} not allowed: ".format(new_instance.as_datetime())
msg = f"Dtg {new_instance.as_datetime()} not allowed: "
msg += "Minimum allowed Dtg subdivision is an hour"
raise ValueError(msg)
@@ -227,13 +227,12 @@ class Dtg(datetime):
         min_allowed_new_date = ref_date + min_cycle_length
         if new_date < min_allowed_new_date:
             raise ValueError(
-                "Min allowed cycle_length is "
-                + str(min_cycle_length)
-                + ". Passed cycle_length={}".format(cycle_length)
+                f"Min allowed cycle_length is {min_cycle_length}. "
+                + f"Passed cycle_length={cycle_length}"
             )
         if not self.compatible_with_cycle_length(cycle_length):
             raise ValueError(
-                "Dtg {} not compatible with cycle_length {}".format(self, cycle_length)
+                f"Dtg {self} not compatible with cycle_length {cycle_length}"
             )
         self._cycle_length = cycle_length

@@ -285,10 +284,9 @@ class Dtg(datetime):
         return self.strftime("%Y-%m-%dT%H %Z")
     def __repr__(self):
-        return "{}('{}', cycle_length='{}')".format(
-            self.__class__.__name__,
-            self.isoformat(),
-            self.cycle_length.freqstr,
+        return (
+            f"{self.__class__.__name__}('{self.isoformat()}', "
+            f"cycle_length='{self.cycle_length.freqstr}')"
         )

@@ -338,8 +336,8 @@ class DtgContainer:
             else:
                 raise ValueError(
                     "No (or inconsistent) 'cycle_length' info found in "
-                    + "{}, and no 'cycle_length' ".format(msg_which_input)
-                    + "passed to '{}'".format(self.__class__.__name__)
+                    + f"{msg_which_input}, and no 'cycle_length' "
+                    + f"passed to '{self.__class__.__name__}'"
                 )
         elif len(data_c_length) > 0:
             logger.warning(

@@ -386,7 +384,7 @@ class DtgContainer:
         def calc_nth_item(n):
             if (n > len(self) - 1) or (n < -len(self)):
-                raise IndexError("{} index out of range".format(self.__class__.__name__))
+                raise IndexError(f"{self.__class__.__name__} index out of range")
             return self._start + sign * (n % len(self)) * self.cycle_length
         if isinstance(item, slice):

@@ -407,12 +405,12 @@ class DtgContainer:
         return not self.__eq__(other)
     def __repr__(self):
-        rtn = "{} object:\n".format(self.__class__.__name__)
+        rtn = f"{self.__class__.__name__} object:\n"
         if self._data is None:
-            rtn += " start={}, end={}\n".format(self._start, self._end)
+            rtn += f" start={self._start}, end={self._end}\n"
         else:
-            rtn += " data=[{}]\n".format(", ".join(map(str, self._data)))
-        rtn += " cycle_length={}".format(self.cycle_length)
+            rtn += f" data=[{', '.join(map(str, self._data))}]\n"
+        rtn += f" cycle_length={self.cycle_length}"
         return rtn
     __str__ = __repr__

@@ -18,7 +18,7 @@ class HsmDiagNotStoredInCompactForm(Exception):
         self,
         *args,
         msg="Diagonal elements of HSM matrix are not stored in compact form",
-        **kwargs
+        **kwargs,
     ):
         """Initialise with a default exception message."""
         super().__init__(msg, *args, **kwargs)

@@ -327,7 +327,7 @@ class _MemSize:
         self.bytes = size_in_bytes
     def __repr__(self):
-        return "{}(size_in_bytes={})".format(type(self).__name__, self.bytes)
+        return f"{type(self).__name__}(size_in_bytes={self.bytes})"
     def __str__(self):
         return humanize.naturalsize(self.bytes)

@@ -443,7 +443,7 @@ class HollowSymmetricMatrix(np.lib.mixins.NDArrayOperatorsMixin):
             np.fill_diagonal(data, 0)
         else:
             raise ValueError(
-                "%s: ndim(data) should be 1 or 2. Got %s." % (cls.__name__, np.ndim(data))
+                f"{cls.__name__}: ndim(data) should be 1 or 2. Got {np.ndim(data)}."
             )
         return data

@@ -607,7 +607,7 @@ class HollowSymmetricMatrix(np.lib.mixins.NDArrayOperatorsMixin):
         """Return (i, j) such that self.data[data_i] == self[i, j]."""
         return _data_index_to_matrix_index(self.order, data_i)
-    def __array__(self, *args, **kwargs):
+    def __array__(self, *args, **kwargs):  # pylint: disable=unused-argument
         # See <https://numpy.org/devdocs/user/basics.dispatch.html>
         # TODO: Remove the ".astype(np.float64)" when possible.
         # HDBSCAN requires that the result of np.array(self) to have

@@ -252,7 +252,7 @@ def read_netatmo_data_for_dates(dates, rootdir, **kwargs):
     if len(data_from_all_files) == 0:
         raise DataNotFoundError(
-            "Could not find data for date(s)={} under dir '{}'".format(dates, rootdir)
+            f"Could not find data for date(s)={dates} under dir '{rootdir}'"
         )
     return pd.concat(data_from_all_files, ignore_index=True)

@@ -431,7 +431,7 @@ def read_netatmo_data_for_dtg(dtg, rootdir, **kwargs):
         data_from_all_files[fpath.stem] = read_netatmo_csv(fpath, **kwargs)
     if len(data_from_all_files) == 0:
         raise DataNotFoundError(
-            "Could not find data for DTG={} under dir '{}'".format(dtg, rootdir)
+            f"Could not find data for DTG={dtg} under dir '{rootdir}'"
         )
     df = pd.concat(data_from_all_files, ignore_index=True)

@@ -177,8 +177,7 @@ def calc_distance_matrix_haversine_plus(df, config):
     allowed_methods = ["manhattan", "euclidean"]
     if method not in allowed_methods:
         raise NotImplementedError(
-            "Argument 'method' must be one of: %s. Received: %s"
-            % (", ".join(allowed_methods), method)
+            f"Argument 'method' must be one of: {', '.join(allowed_methods)}. Received: {method}"
         )
     return HollowSymmetricMatrix(

@@ -333,8 +332,8 @@ def calc_distance_matrix(df, config, domain=None, num_threads=-1):
     method = config.metrics.method.lower()
     if method not in accepted_methods:
         raise NotImplementedError(
-            "Distance matrix calc method '%s' not available. " % (method)
-            + "Please choose method from: %s" % (", ".join(accepted_methods))
+            f"Distance matrix calc method '{method}' not available. "
+            + f"Please choose method from: {', '.join(accepted_methods)}"
         )
     logger.debug("Computing distance matrix using the '%s' method", method)

@@ -18,12 +18,7 @@ logger = logging.getLogger(__name__)
 def mpi_parallel(fun, iterable):
     """Run function "fun" in parallel over "iterable" using MPI."""
     if MPI4PY_IMPORT_ERROR is not None:
-        msg = "%s. %s%s%s" % (
-            MPI4PY_IMPORT_ERROR,
-            logcolor.red,
-            "Support to MPI is unavailable!",
-            logcolor.reset,
-        )
+        msg = f"{MPI4PY_IMPORT_ERROR}. {logcolor.red}Support to MPI is unavailable!{logcolor.reset}"
         raise ImportError(msg) from MPI4PY_IMPORT_ERROR
     # Prevent using mpiexec with n>1

@@ -33,7 +28,7 @@ def mpi_parallel(fun, iterable):
     if size > 1:
         if rank == 0:
             raise ValueError(
-                "Received '-n %d' from the MPI runner. Please " % (size)
+                f"Received '-n {size}' from the MPI runner. Please "
                 + "use '-n 1' when running this application with MPI, "
                 + "and then select the maximum number N of parallel "
                 + "MPI tasks by passing '-usize N'"

@@ -30,7 +30,7 @@ def get_obs_scattergeo_trace(df, trace_name=None, marker=None, visible=True):
     if trace_name is None:
         hovertemplate = []
     else:
-        hovertemplate = ["<b>%s</b><br>" % (trace_name)]
+        hovertemplate = [f"<b>{trace_name}</b><br>"]
     hoverinfo_cols = [c for c in df.columns if not c.startswith("_")]
     for icol, col in enumerate(hoverinfo_cols):
         fmt = None

@@ -64,7 +64,7 @@ def draw_boundaries(
     corners=None,
     showlegend=True,
     legendgroup=None,
-    **kwargs
+    **kwargs,
 ):
     """Add to fig line segments connecting the given corners within the domain.

@@ -141,7 +141,7 @@ def draw_grid_pts(fig, grid, display_grid_max_gsize=None, name="Grid", **marker_
     grid_draw_every = max(1, int(display_grid_max_gsize / grid.x_spacing))
     lons, lats = grid.ij2lonlat_map()
     if grid_draw_every > 1:
-        name += " (every %s point of)" % (humanize.ordinal(grid_draw_every))
+        name += f" (every {humanize.ordinal(grid_draw_every)} point of)"
     fig.add_trace(
         go.Scattergeo(
             name=name,

@@ -163,7 +163,7 @@ def get_domain_fig(
     latrange=None,
     ezone=None,
     obs_df=None,
-    **kwargs
+    **kwargs,
 ):
     """Return map representation of "domain" (a ".domains.Domain" object)."""
     # Should we plot the extension zone?

@@ -215,9 +215,7 @@ def get_domain_fig(
         height=800,
         margin=dict(r=0, l=0, b=0),
         title=dict(
-            text="{} Domain Boundaries and Grid ({} Projection)".format(
-                domain.name, domain.proj.full_name
-            ),
+            text=f"{domain.name} Domain Boundaries and Grid ({domain.proj.full_name} Projection)",
             xanchor="center",
             x=0.5,
             yanchor="top",

@@ -340,7 +338,7 @@ def make_clustering_fig(df, domain, **kwargs):
             label_count = label_counts[label]
         elif label == -2:
             # Outliers found after main clustering
-            legend_label = "Cluster {}, removed outliers:".format(orig_label)
+            legend_label = f"Cluster {orig_label}, removed outliers:"
             label_count = orig_label_counts[orig_label] - label_counts[orig_label]
         elif label == -3:
             legend_label = "Outliers, preliminary clustering:"

@@ -352,9 +350,9 @@ def make_clustering_fig(df, domain, **kwargs):
             legend_label = "Moving stations:"
             label_count = label_counts[label]
         else:
-            legend_label = "Cluster {}, accepted:".format(int(label))
+            legend_label = f"Cluster {label}, accepted:"
             label_count = label_counts[label]
-        legend_label += " {} obs".format(int(label_count))
+        legend_label += f" {label_count} obs"
         return legend_label
     set_plot_label = np.vectorize(_set_plot_label, otypes=[str])

@@ -494,7 +492,7 @@ def show_cmd_get_fig_from_dataframes(args, dataframes, domain):
         marker = dict(color="blue")
     trace_name = Path(fname).stem.replace("_", " ").title()
-    trace_name += " (%d obs)" % (len(df.index))
+    trace_name += f" ({len(df.index)} obs)"
     trace = get_obs_scattergeo_trace(
         df, trace_name=trace_name, marker=marker, visible=trace_visible
     )

@@ -565,7 +563,7 @@ def init_fig_dict(domain, dataset_var, frame_duration):
     fig_dict["frames"] = []
     # Figure layout
-    fig_dict["layout"]["title"]["text"] = "NetAtmo Data: {}".format(dataset_var)
+    fig_dict["layout"]["title"]["text"] = f"NetAtmo Data: {dataset_var}"
     fig_dict["layout"]["updatemenus"] = [
         # <https://plotly.com/python/reference/layout/updatemenus>

@@ -39,7 +39,7 @@ def save_df_as_netatmo_csv(df, path, overwrite=False):
     """
     path = Path(path).resolve()
     if path.exists() and not overwrite:
-        raise FileExistsError("File {} exists".format(path))
+        raise FileExistsError(f"File {path} exists")
     path.parent.mkdir(parents=True, exist_ok=True)
     if pd.api.types.is_datetime64_any_dtype(df["time_utc"]):

@@ -105,7 +105,7 @@ def save_df_as_obsoul(df, fpath=None, export_params=None):
     # Validate fpath and create parent dirs tree if not in place
     if fpath is None:
-        fpath = Path() / "OBSOUL{}".format(dtg.strftime("%Y%m%d%H"))
+        fpath = Path() / f"OBSOUL{dtg.strftime('%Y%m%d%H')}"
     fpath = Path(fpath)
     fpath.parent.mkdir(parents=True, exist_ok=True)

@@ -139,7 +139,7 @@ def save_df_as_obsoul(df, fpath=None, export_params=None):
     analysis_time = dtg.strftime("%H")
     with open(fpath, "w") as obsoul_file:
         # The "date time" first line in the file
-        obsoul_file.write("{} {}\n".format(analysis_date, analysis_time))
+        obsoul_file.write(f"{analysis_date} {analysis_time}\n")
         for row in df.itertuples(index=False):
             # Construct the records. Each record has a header and multiple

@@ -162,7 +162,7 @@ def save_df_as_obsoul(df, fpath=None, export_params=None):
                 obs_code,
                 row.lat,
                 row.lon,
-                "'{}'".format(row.id),
+                f"'{row.id}'",
                 obs_date,
                 obs_hour,
                 row.alt,

@@ -240,9 +240,11 @@ def obs_timestamp2csv_rpath(timestamp):
         f_minute = "45"
     elif minute < 60:
         f_minute = "55"
+    else:
+        raise ValueError(f"Something went wrong: Got {f_minute} minutes in an hour!")
-    fname = "%s%s00Z.csv" % (timestamp.strftime("%Y%m%dT%H"), f_minute)
-    return Path("%s/%s/%s/%s" % (year, month, day, fname))
+    fname = f"{timestamp.strftime('%Y%m%dT%H')}{f_minute}00Z.csv"
+    return Path(f"{year}/{month}/{day}/{fname}")
 def _input2output_single_dtg(

@@ -321,7 +323,7 @@ def _input2output_single_dtg(
         df = df.drop("_f_rpath", axis=1)
     if outdir_obsoul is not None:
-        fpath = Path(outdir_obsoul) / "OBSOUL{}".format(dtg.strftime("%Y%m%d%H"))
+        fpath = Path(outdir_obsoul) / f"OBSOUL{dtg.strftime('%Y%m%d%H')}"
         logger.debug(
            "%sSaving OBSOUL: DTG=%s, %d obs%s, file %s",
            logcolor.cyan,
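
A second change threaded through the diff: trailing commas after `**kwargs` in multi-line function signatures (see `draw_boundaries`, `get_domain_fig`, and the `HsmDiagNotStoredInCompactForm` initialiser). These need the same Python >= 3.6 floor as f-strings; older interpreters reject them as a syntax error. A minimal sketch of the style, with a hypothetical function name:

    def plot_domain(
        domain,
        obs_df=None,
        **kwargs,  # trailing comma: a SyntaxError before Python 3.6
    ):
        """Hypothetical signature illustrating the trailing-comma style."""
        return domain, obs_df, kwargs

The trailing comma matches the output of formatters such as black for multi-line argument lists. Note also the nested-quote pattern in lines like f"OBSOUL{dtg.strftime('%Y%m%d%H')}": before Python 3.12, an f-string expression could not reuse the literal's own quote character, hence the single quotes inside the double-quoted literal.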