Skip to content

Commit

Permalink
Update basic typing (#362)
Browse files Browse the repository at this point in the history
  • Loading branch information
jrapin authored Dec 10, 2019
1 parent d3f58c6 commit 96e2454
Show file tree
Hide file tree
Showing 9 changed files with 117 additions and 102 deletions.
2 changes: 1 addition & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ extension-pkg-whitelist=numpy,nose,nose.tools,numpy.testing, pathlib.PurePath, p

[MESSAGES CONTROL]
# disabled messages
disable=invalid-name,missing-docstring,too-few-public-methods, protected-access, import-error, no-self-use, fixme, no-else-return, no-member
disable=invalid-name,missing-docstring,too-few-public-methods, protected-access, import-error, no-self-use, fixme, no-else-return, no-member, useless-import-alias

[TYPECHECK]
ignored-modules = numpy, numpy.testing
Expand Down
22 changes: 14 additions & 8 deletions nevergrad/benchmark/exporttable.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,19 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Utils for exporting a data table in latex.
# Used in competence maps.
import typing as t

def remove_parens(data):

def remove_parens(data: t.List[t.List[str]]) -> t.List[t.List[str]]:
    # Truncate each cell at its first "(" (cells without one are kept whole).
    return [[cell.split("(", 1)[0] for cell in row] for row in data]


def export_table(filename, rows, cols, data):

def export_table(filename: str, rows: t.List[t.Any], cols: t.List[t.Any], data: t.List[t.List[str]]) -> None:
"""Exports data in filename with rows and cols as described.
More precisely, rows specifies the row names, cols specifies the col names,
and data[i][j] corresponds to the data in row rows[i] and col cols[j].
Expand All @@ -19,7 +26,7 @@ def export_table(filename, rows, cols, data):
rows = [str(r) for r in rows]
cols = [str(r) for r in cols]
# Latex syntax.
data = [[d.replace("%", "\%").replace("_", "") for d in datarow] for datarow in data]
data = [[d.replace("%", r"\%").replace("_", "") for d in datarow] for datarow in data]
data = remove_parens(data)
print("filename=", filename)
print("rows=", rows)
Expand All @@ -39,14 +46,14 @@ def export_table(filename, rows, cols, data):
f.write("\\lccode`7=`7\n")
f.write("\\lccode`8=`8\n")
f.write("\\lccode`9=`9\n")
f.write("\\newcolumntype{P}[1]{>{\hspace{0pt}}p{#1}}\n")
f.write(r"\\newcolumntype{P}[1]{>{\hspace{0pt}}p{#1}}\n")
f.write("\\begin{document}\n")
f.write("\\scriptsize\n")
f.write("\\renewcommand{\\arraystretch}{1.5}\n")
f.write("\\sloppy\n")
p = str(1./(2+len(cols)))
p = str(1.0 / (2 + len(cols)))
# f.write("\\begin{landscape}\n")
f.write("\\begin{tabular}{|P{" + p +"\\textwidth}|" + ("P{" + p + "\\textwidth}|") * len(cols) + "}\n")
f.write("\\begin{tabular}{|P{" + p + "\\textwidth}|" + ("P{" + p + "\\textwidth}|") * len(cols) + "}\n")
f.write("\\hline\n")
f.write(" & " + "&".join(cols) + "\\\\\n")
f.write("\\hline\n")
Expand All @@ -62,4 +69,3 @@ def export_table(filename, rows, cols, data):
f.write("\\end{tabular}\n")
# f.write("\\end{landscape}\n")
f.write("\\end{document}\n")
pass
2 changes: 1 addition & 1 deletion nevergrad/benchmark/xpbase.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ def _run_with_error(self, callbacks: Optional[Dict[str, base._OptimCallBack]] =
# Note: when resuming a job (if optimizer is not None), seeding is pointless (reproducibility is lost)
np.random.seed(self.seed) # seeds both functions and instrumentation (for which random state init is lazy)
random.seed(self.seed)
torch.manual_seed(self.seed)
torch.manual_seed(self.seed) # type: ignore
# optimizer instantiation can be slow and is done only here to make xp iterators very fast
if self._optimizer is None:
self._optimizer = self.optimsettings.instantiate(instrumentation=instrumentation)
Expand Down
30 changes: 17 additions & 13 deletions nevergrad/examples/powersystem.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,17 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# run this with:
# echo 'import nevergrad.examples.powersystem' | python
# echo 'import nevergrad.examples.powersystem' | python
# or (e.g. MacOS):
# echo 'import nevergrad.examples.powersystem' | pythonw
# echo 'import nevergrad.examples.powersystem' | pythonw

import nevergrad as ng
from nevergrad.functions.powersystems.core import PowerSystem
from nevergrad.optimization import optimizerlib

budget=3500
budget = 3500
width = 6
depth = 6
num_dams = 6
Expand All @@ -23,12 +28,11 @@
# num_years: int = 1, # Number of years.
# failure_cost: float = 500., # Cost of not satisfying the demand. Equivalent to an expensive infinite capacity thermal plant.

power_system_loss = PowerSystem(num_dams=num_dams, depth=depth, width=width, year_to_day_ratio=year_to_day_ratio, back_to_normal=back_to_normal, num_thermal_plants=num_thermal_plants, constant_to_year_ratio=constant_to_year_ratio)
optimizer = optimizerlib.SplitOptimizer9(instrumentation=power_system_loss.dimension, budget=budget, num_workers=10)
optimizer.minimize(power_system_loss)
power_system_loss(optimizer.provide_recommendation().data)
power_system_loss.make_plots(f"ps_{num_dams}dams_{depth}_{width}_ytdr{year_to_day_ratio}_btn{back_to_normal}_num_thermal_plants{num_thermal_plants}_ctyr{constant_to_year_ratio}_budget{budget}.png")




power_system_loss = PowerSystem(num_dams=num_dams, depth=depth, width=width, year_to_day_ratio=year_to_day_ratio,
back_to_normal=back_to_normal, num_thermal_plants=num_thermal_plants,
constant_to_year_ratio=constant_to_year_ratio)
optimizer = ng.optimizers.SplitOptimizer(instrumentation=power_system_loss.dimension, budget=budget, num_workers=10)
optimizer.minimize(power_system_loss)
power_system_loss(optimizer.provide_recommendation().data)
power_system_loss.make_plots(f"ps_{num_dams}dams_{depth}_{width}_ytdr{year_to_day_ratio}_btn{back_to_normal}"
f"_num_thermal_plants{num_thermal_plants}_ctyr{constant_to_year_ratio}_budget{budget}.png")
2 changes: 1 addition & 1 deletion nevergrad/functions/multiobjective/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from .core import MultiobjectiveFunction
from .core import MultiobjectiveFunction as MultiobjectiveFunction
6 changes: 3 additions & 3 deletions nevergrad/functions/multiobjective/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class MultiobjectiveFunction:
- The minimum value obtained for this objective function is -h,
where h is the hypervolume of the Pareto front obtained, given upper_bounds as a reference point.
- The callable keeps track of the pareto_front (see attribute pareto_front) and is therefore stateful.
For this reasonm it cannot be distributed. A user can however call the multiobjective_function
For this reason it cannot be distributed. A user can however call the multiobjective_function
remotely, and aggregate locally. This is what happens in the "minimize" method of optimizers.
"""

Expand All @@ -47,7 +47,7 @@ def compute_aggregate_loss(self, losses: ArrayLike, *args: Any, **kwargs: Any) -
"""
# We compute the hypervolume
if (losses - self._upper_bounds > 0).any():
return np.max(losses - self._upper_bounds)
return np.max(losses - self._upper_bounds) # type: ignore
arr_losses = np.minimum(np.array(losses, copy=False), self._upper_bounds)
new_volume: float = self._hypervolume.compute([y for _, y in self._points] + [arr_losses])
if new_volume > self._best_volume: # This point is good! Let us give him a great mono-fitness value.
Expand All @@ -60,7 +60,7 @@ def compute_aggregate_loss(self, losses: ArrayLike, *args: Any, **kwargs: Any) -
else:
# Now we compute for each axis
# First we prune.
self.pareto_front
self.pareto_front # pylint: disable=pointless-statement
distance_to_pareto = float("Inf")
for _, stored_losses in self._points:
if (stored_losses <= arr_losses).all():
Expand Down
Loading

0 comments on commit 96e2454

Please sign in to comment.