Commit b255052b authored by Uwe Köckemann

Working version

parent 04b3c209
@@ -65,9 +65,9 @@
(#tuple moving-targets-problem
(
examples:$examples ;; Regular supervised ML problem
loss-function:MeanSquareError ;; Select available loss function
loss-function:MeanSquaredError ;; Select available loss function
constraints:{
(> y -30) ;; Each element in set needs to be translated to PySMT
(>= y -30) ;; Each element in set needs to be translated to PySMT
;;(fairness-1 y 1.0)
}
alpha:1 ;; Parameter alpha
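Note (not part of the commit): the numeric constraint above is a plain AIDDL tuple of the form (relation variable bound). A minimal sketch of how such a term can be unpacked on the Python side; the accessor calls mirror the updated add_constraints() in moving_target_cplex.py further down in this commit.

import aiddl_core.parser.parser as parser

# One constraint entry, unpacked positionally the way add_constraints() does:
c = parser.parse_term("(>= y -30)")
ctype = c[0].string_value()   # ">="  - the relation
cvar = c[1].string_value()    # "y"   - the constrained quantity
cval = c[2].real_value()      # -30.0 - the bound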
@@ -7,12 +7,16 @@ from aiddl_core.representation.tuple import Tuple
from aiddl_core.container.container import Container
import aiddl_core.function.default as dfun
from aiddl_core.tools.logger import Logger
import aiddl_core.parser.parser as parser
from aiddl_network.grpc_function import GrpcFunction
from aiddl_network.aiddl_grpc_server import AiddlServicer
from aiddl_network.aiddl_grpc_server import LOADER_URI
from moving_target_cplex import MovingTargetRegCplex
# Loaded modules (aka AIDDL files) go to container:
C = Container()
@@ -47,12 +51,24 @@ dt_learner_conf = parser.parse_term('''
config:{}
)
''')
ls_expansion = parser.parse_term('''
(
name:Expansion
module:my
class:org.aiddl.common.learning.linear_regression.ExpansionFunction
config:{}
)
''')
f_create.apply(ls_expansion)
ls_learner_conf = parser.parse_term('''
(
name:LeastSquares
module:my
class:org.aiddl.common.learning.linear_regression.LinearRegression
config:{}
config:{ expansion:^my.Expansion }
)
''')
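Annotation (not part of the commit): the two creation requests above share one shape, summarized here for orientation.

# Shape of these creation requests:
#   name:<symbol>       - name the new function is registered under (here: Expansion, LeastSquares)
#   module:<symbol>     - module that will own the function (here: my)
#   class:<Java class>  - implementation to instantiate, presumably on the server side
#   config:{...}        - constructor options; expansion:^my.Expansion points
#                         at the Expansion function created just above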
@@ -84,11 +100,11 @@ f_LAP = GrpcFunction(host, port, lap_uri)
# Finally, we can apply mean square error to data:
weights = f_MSE.apply(example_data)
print("Weights:", weights)
# print("Weights:", weights)
y_k = f_LAP.apply(example_data)
# y_k = f_LAP.apply(example_data)
print("Result:", y_k)
# print("Result:", y_k)
# Create local function to expand data and
# hook it into least squares function running on server
@@ -98,3 +114,13 @@ print("Result:", y_k)
# F.add_function(ExpandData(), Symbolic("my.expander"))
mtc = MovingTargetRegCplex(f_LAP, n=30)
mt_data = C.get_entry(Symbolic("moving-targets-problem"), module=example_module_uri).get_value()
mt_data = mt_data.resolve(C)
# print(Logger.pretty_print(mt_data, 0))
mtc.apply(mt_data)
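For orientation, a short sketch (not part of the commit) of how the resolved problem term is read by MovingTarget.apply() in moving_target_abc.py below; the key names come from the AIDDL entry at the top of this commit.

from aiddl_core.representation.symbolic import Symbolic

# What MovingTarget.apply() reads out of mt_data:
examples = mt_data[Symbolic("examples")]        # supervised learning problem
loss = mt_data[Symbolic("loss-function")]       # e.g. MeanSquaredError
constraints = mt_data[Symbolic("constraints")]  # set of terms such as (>= y -30)
alpha = mt_data[Symbolic("alpha")]              # numeric trade-off parameter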
from _typeshed import NoneType
# from _typeshed import NoneType
from abc import ABC, abstractmethod
from re import A
import numpy as np
from aiddl_core.representation.list import List
from aiddl_core.representation.symbolic import Symbolic
from aiddl_core.representation.real import Real
from aiddl_core.representation.tuple import Tuple
class MovingTarget(ABC):
def __init__(self, n=10):
def __init__(self, ML, n=10):
self.n = n
self.ML = ML
def apply(self, args):
d = args[Symbolic("examples")][Symbolic("data")] # d = (x_s, y_s)
ml_problem = args[Symbolic("examples")]
d = self.convert_data(args[Symbolic("examples")]) # d = (x_s, y_s)
L = args[Symbolic("loss-function")]
C = args[Symbolic("constraints")]
alpha = args[Symbolic("alpha")]
@@ -25,19 +30,21 @@ class MovingTarget(ABC):
# self.set_loss_function(M, L, d)
# d_data = self.get_pysmt_data(y_k)
y_k = self.ML.get_function().apply(d)
y_k = self.ML.apply(ml_problem)
for i in range(0, self.n):
sat_C = self.check_constraints_ext(M, C, d)
sat_C = self.check_constraints_ext(M, C, (d[0], np.array(y_k.unpack())))
if not sat_C:
self.m_alpha(M, L, y_k, alpha)
else:
self.m_beta(M, L, y_k, beta)
print("-------------------------")
z_k = self.solve_ext(M)
ml_problem = self.assemble_ml_problem(args, z_k)
print(z_k)
ml_problem = self.assemble_ml_problem(ml_problem, z_k)
y_k = self.ML.apply(ml_problem)
print(y_k)
@abstractmethod
def add_constraints(self, M, C, y_s):
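Stripped of the AIDDL and gRPC plumbing, the apply() loop above follows this scheme (a paraphrase for orientation, not part of the commit):

# y_k = ML(problem)                                   # initial predictions
# repeat n times:
#     if y_k violates the constraints:                # check_constraints_ext
#         z_k = argmin y_loss + (1/alpha) * p_loss    # m_alpha, then solve_ext
#     else:
#         z_k = argmin y_loss  s.t.  p_loss <= beta   # m_beta, then solve_ext
#     problem = problem with labels replaced by z_k   # assemble_ml_problem
#     y_k = ML(problem)                               # retrain on adjusted targets
# where y_loss compares the decision variables to the original labels and
# p_loss compares them to the current predictions y_k (see the CPLEX subclass).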
@@ -63,14 +70,37 @@ class MovingTargetRegCplex(MovingTarget):
def initialize_ext(self, d):
"""Initialize external solver and return blank model."""
def convert_data(self, current):
print(current)
label = current[Symbolic('label')]
attributes = current[Symbolic('attributes')]
data = current[Symbolic('data')]
label_idx = None
for i in range(len(attributes)):
if attributes[i][0] == label:
label_idx = i
break
y = []
x = []
for i in range(len(data)):
y.append(data[i][label_idx].real_value())
x_row = []
for j in range(len(data[i])):
if j != label_idx:
x_row.append(data[i][j].real_value())
x.append(x_row)
x = np.array(x)
y = np.array(y)
return x, y
def assemble_ml_problem(self, current, y_k):
"""Insert current label vector into machine learning problem."""
label = current[Symbolic('label')]
attributes = current[Symbolic['attributes']]
data = current[Symbolic['data']]
attributes = current[Symbolic('attributes')]
data = current[Symbolic('data')]
label_idx = None
for i in range(len(attributes)):
if A[i][0] == label:
if attributes[i][0] == label:
label_idx = i
break
new_data = []
@@ -78,9 +108,9 @@
row = []
for j in range(len(data[i])):
if j == label_idx:
row.append(y_k[i])
row.append(Real(y_k[i]))
else:
row.append(data[i][j])
row.append(Real(data[i][j]))
new_data.append(Tuple(row))
new_data = List(new_data)
problem = current.put(Symbolic('data'), new_data)
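To make the two helpers above concrete: convert_data() splits the AIDDL data rows into a feature matrix and a label vector, and assemble_ml_problem() writes a new label vector back into the rows. A small hypothetical illustration of the split (attribute names and numbers are made up):

import numpy as np

# Suppose the problem term has label y, attributes ((x1 ...) (y ...)) and
# data rows ((1.0 5.0) (2.0 7.0) (3.0 9.0)); convert_data() then yields:
rows = np.array([[1.0, 5.0], [2.0, 7.0], [3.0, 9.0]])
label_idx = 1                       # position of the label attribute 'y'
y = rows[:, label_idx]              # array([5., 7., 9.])       label column
x = np.delete(rows, label_idx, 1)   # array([[1.], [2.], [3.]]) remaining columns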
import numpy as np
from aiddl_core.representation.symbolic import Symbolic
from moving_target_abc import MovingTarget
from docplex.mp.model import Model as CPModel
from docplex.mp.model import DOcplexException
@@ -34,8 +36,13 @@ class MovingTargetRegCplex(MovingTarget):
]
"""
for c in C:
ctype, cvar, cval = c
print(c)
ctype = c[0].string_value()
cvar = c[1].string_value()
cval = c[2].real_value()
# ctype, cvar, cval = c
# Store the constraint in the class.
cstr = InequalityGlobalConstraint('ct', ctype, cval)
self.constraints.append(cstr)
@@ -57,20 +64,20 @@ class MovingTargetRegCplex(MovingTarget):
n_points = len(y_k)
idx_var = [i for i in range(n_points)]
x = [M.get_var_by_name(s) for s in self.variables]
y_k = y_k.flatten()
y_k = np.array(y_k.unpack())
if L == 'MSE' or L == 'MeanSquaredError':
if L == Symbolic('MSE') or L == Symbolic('MeanSquaredError'):
y_loss = (1.0 / n_points) * M.sum([(self.y[i] - x[i]) * (self.y[i] - x[i]) for i in idx_var])
p_loss = (1.0 / n_points) * M.sum([(y_k[i] - x[i]) * (y_k[i] - x[i]) for i in idx_var])
elif L == 'MAE' or L == 'MeanAbsoluteError':
elif L == Symbolic('MAE') or L == Symbolic('MeanAbsoluteError'):
y_loss = (1.0 / n_points) * M.sum([M.abs(self.y[i] - x[i]) for i in idx_var])
p_loss = (1.0 / n_points) * M.sum([M.abs(y_k[i] - x[i]) for i in idx_var])
else:
raise NotImplementedError("Loss function not recognized!")
obj_func = y_loss + (1.0 / alpha) * p_loss
obj_func = y_loss + (1.0 / alpha.real_value()) * p_loss
M.minimize(obj_func)
def m_beta(self, M, L, y_k, beta):
@@ -78,13 +85,13 @@ class MovingTargetRegCplex(MovingTarget):
n_points = len(y_k)
idx_var = [i for i in range(n_points)]
x = [M.get_var_by_name(s) for s in self.variables]
y_k = y_k.flatten()
y_k = np.array(y_k.unpack())
if L == 'MSE' or L == 'MeanSquaredError':
if L == Symbolic('MSE') or L == Symbolic('MeanSquaredError'):
y_loss = (1.0 / n_points) * M.sum([(self.y[i] - x[i]) * (self.y[i] - x[i]) for i in idx_var])
p_loss = (1.0 / n_points) * M.sum([(y_k[i] - x[i]) * (y_k[i] - x[i]) for i in idx_var])
elif L == 'MAE' or L == 'MeanAbsoluteError':
elif L == Symbolic('MAE') or L == Symbolic('MeanAbsoluteError'):
y_loss = (1.0 / n_points) * M.sum([M.abs(self.y[i] - x[i]) for i in idx_var])
p_loss = (1.0 / n_points) * M.sum([M.abs(y_k[i] - x[i]) for i in idx_var])
@@ -92,7 +99,7 @@ class MovingTargetRegCplex(MovingTarget):
raise NotImplementedError("Loss function not recognized!")
obj_func = y_loss
M.add(p_loss <= beta)
M.add(p_loss <= beta.real_value())
M.minimize(obj_func)
def check_constraints_ext(self, M, C, d):
@@ -115,6 +122,8 @@ class MovingTargetRegCplex(MovingTarget):
x = [M.get_var_by_name(s) for s in self.variables]
y_opt = np.array([_x.solution_value for _x in x])
print("y_opt:", y_opt)
return y_opt
def initialize_ext(self, d, name='cplex_model'):
@@ -131,7 +140,7 @@ class MovingTargetRegCplex(MovingTarget):
# Variable declaration.
n_points = len(y)
idx_var = [i for i in range(n_points)]
mod.continuous_var_list(keys=idx_var, lb=-10.0, ub=10.0, name='y')
mod.continuous_var_list(keys=idx_var, lb=min(y)-10.0, name='y')
# Store variable names.
self.variables = ['y_%d' %i for i in idx_var]
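For intuition on the objectives built in m_alpha and m_beta above, a tiny dependency-free example with the MSE loss (the numbers are made up; in the real code CPLEX minimizes over the decision variables z, here z is just a fixed candidate):

import numpy as np

y_s = np.array([1.0, 2.0, -40.0])   # original labels (self.y)
y_k = np.array([1.5, 2.5, -35.0])   # current model predictions
z = np.array([1.2, 2.1, -30.0])     # candidate targets, respecting y >= -30
alpha, beta = 1.0, 1.0

y_loss = np.mean((y_s - z) ** 2)    # stay close to the true labels
p_loss = np.mean((y_k - z) ** 2)    # stay close to the current predictions

obj_alpha = y_loss + (1.0 / alpha) * p_loss   # m_alpha: used while constraints are violated
obj_beta = y_loss                             # m_beta: minimized subject to p_loss <= beta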