Scipy / Mystic - Constraints violated after being specified

111 Views Asked by At

I'm trying to optimize a 10x5 matrix to maximize a return value y, using mystic. There are 2 types of constraints that I need to include:

  1. Each element must be in between a min and max range
  2. Some elements must equal specific values (provided in a dictionary)

I'm not sure why the optimized result includes negative values, as I have already specified a constraint against that (constraint no. 1). Also, why does the returned y value seem to be incredibly small?

import random
import pandas as pd
import numpy as np
import mystic.symbolic as ms
import mystic as my
import scipy.optimize as so

# Specifying constraints: per-element bounds plus a cap on the matrix total.
element_low_lim = 0        # lower bound applied to every free (non-fixed) element
element_hi_lim = 1000      # upper bound applied to every free (non-fixed) element
total_matrix_min_sum = 0       # minimum allowed sum over all matrix elements
total_matrix_max_sum = 220000  # maximum allowed sum over all matrix elements

# Create an input matrix where the values must equal a specific amount.
# NOTE(review): 52 x 5 = 260 elements, matching the 260 symbolic variables
# built below, though the question text says "10x5" -- confirm intended size.
matrix_in = np.zeros((52,5))
matrix_in[0,0] = 100  # element (0,0) is fixed to the value 100


def constraints_func(element_low_lim, element_hi_lim, total_matrix_min_sum, total_matrix_max_sum, element_vals_dict, n_vars=260):
    """Build a mystic symbolic-constraint string for the flattened matrix.

    Parameters
    ----------
    element_low_lim, element_hi_lim : numbers
        Bounds applied to every variable that is NOT fixed by ``element_vals_dict``.
    total_matrix_min_sum, total_matrix_max_sum : numbers
        Bounds on the sum of all variables.
    element_vals_dict : dict
        Maps variable names (e.g. ``'x0'``) to fixed values; those variables
        get an equality constraint instead of bounds.
    n_vars : int, optional
        Number of flattened matrix variables (default 260 = 52 * 5).

    Returns
    -------
    str
        Newline-separated constraint equations suitable for ``mystic.symbolic``.
    """
    var_num = ['x' + str(i) for i in range(n_vars)]

    # Per-element bounds for every free (non-fixed) variable.
    bound_lines = []
    for var in var_num:
        if var not in element_vals_dict:
            bound_lines.append(f'{var} >= {element_low_lim}')
            bound_lines.append(f'{var} <= {element_hi_lim}')

    # Total-sum constraints over all variables.
    all_vars = ' + '.join(var_num)
    sum_lines = [f'{all_vars} <= {total_matrix_max_sum}',
                 f'{all_vars} >= {total_matrix_min_sum}']

    # Fixed-value (equality) constraints from the dictionary.
    fixed_lines = [f'{var} == {val}' for var, val in element_vals_dict.items()]

    # BUG FIX: the original referenced an undefined name `constraint_column_sum`
    # (NameError) and concatenated the sections without newline separators,
    # which would fuse two equations onto one line. Joining a single list of
    # lines guarantees exactly one equation per line.
    return '\n'.join(bound_lines + sum_lines + fixed_lines)

# Build the symbolic constraint string, simplify it, and compile a constraint
# function that maps a candidate x onto the feasible region.
# BUG FIX: the original call passed 7 arguments to the 5-parameter
# constraints_func (TypeError); column-sum limits are not part of that function.
# NOTE(review): element_vals_dict is never defined in this snippet -- build it
# from the non-zero entries of matrix_in before this call.
equations = constraints_func(element_low_lim, element_hi_lim,
                             total_matrix_min_sum, total_matrix_max_sum,
                             element_vals_dict)
equations = ms.simplify(equations, all=True)
constrain = ms.generate_constraint(ms.generate_solvers(equations), join=my.constraints.and_)

# Define Objective function
def obj_func(matrix):
    """Return the negated total of the candidate matrix.

    BUG FIX: the original returned ``np.sum(output_matrix)``, a NameError
    (undefined variable) instead of using the ``matrix`` argument. It also
    returned the plain sum, so ``so.minimize`` would drive the total toward
    its minimum; the stated goal is to MAXIMIZE the total, so we minimize
    its negative instead.
    """
    return -np.sum(matrix)

# Progress monitor that prints every iteration.
mon = my.monitors.VerboseMonitor(1)

# Objective history, one entry per SLSQP iteration.
objs = []

def callback(x):
    """Record the constrained iterate and its negated objective each iteration."""
    constrained = constrain(x)
    score = -obj_func(x)
    mon(constrained, score)
    objs.append(score)
# Create a starting matrix of small random integers.
# BUG FIX: the matrix has 52 * 5 = 260 elements and the constraint builder
# creates variables x0..x259, but the original start vector had only 200
# entries -- a mismatch with the 260 constraint variables.
start_matrix = [random.randint(0, 3) for _ in range(260)]

# Run the optimizer (SLSQP minimizes obj_func; callback logs each iteration).
solution = so.minimize(obj_func, start_matrix, method='SLSQP', tol=0.01,
                       options={'disp': True, 'maxiter': 100}, callback=callback)
1

There is 1 answer below

5
Mike McKerns On

The issue you are running into is that mystic could use some help in simplifying the equations so that they are most likely to succeed when imposing the constraints.

You have x0, x1, and x2 as fixed. However, in your sum constraints, simplify will (by default) isolate the first variable in the list on the LHS -- where the isolated variable is the one that the constraints adjust. So, having x0 fixed, and then expecting it to be adjusted to resolve the sum constraints is a bad idea. mystic will attempt to resolve the constraints, but fail and give up.

What you want to do is isolate the "non-fixed" variables in the sum constraints. I've rewritten your code to do that.

>>> import random
>>> import pandas as pd
>>> import numpy as np
>>> import mystic.symbolic as ms
>>> import mystic as my
>>> import scipy.optimize as so
>>> 
>>> # Specifying constraints
>>> element_low_lim = 0
>>> element_hi_lim = 20000
>>> total_matrix_min_sum = 0
>>> total_matrix_max_sum = 20000
>>> column_sum_min_lst = [0,0,0,0,0]
>>> column_sum_max_lst = [10000,2000,8000,0,0]
>>> 
>>> # Create an input matrix where the values must equal a specific amount
>>> matrix_in = np.zeros((52,5))
>>> matrix_in[0,0] = 100
>>> matrix_in[0,1] = 200
>>> matrix_in[0,2] = 300
>>>
>>> # Create a dictionary of elements constraints (i.e create a dict of where value is non-zero)
>>> flat_mat = matrix_in.flatten()
>>> nonzero_idx = np.where(flat_mat>0)
>>> nonzero_vals = flat_mat[nonzero_idx]
>>> element_vals_dict = dict(zip(['x'+str(i) for i in nonzero_idx[0]], nonzero_vals))

I've broken the constraints function up into smaller functions, to make the equations easier to deal with in simplify.

>>> def constraints_func0(element_vals_dict):
...     #creating element constraints
...     constraint_elements = '\n'.join([var+' == '+str(element_vals_dict[var]) for var in element_vals_dict])
...     return constraint_elements.lstrip() + '\n'
... 
>>> def constraints_func1(element_low_lim, element_hi_lim):
...     var_num = ['x'+str(i) for i in range(260)]
...     #creating element bounds constraints
...     constraint_bound = ''
...     for var in var_num:
...         if var not in element_vals_dict:
...             constraint_bound += var + f' >= {element_low_lim}' + '\n' + var + f' <= {element_hi_lim}' + '\n'
...     return constraint_bound.lstrip()
... 
>>> def constraints_func2(total_matrix_min_sum, total_matrix_max_sum):
...     var_num = ['x'+str(i) for i in range(260)]
...     #creating sum of all the elements constraint
...     constraint_matrix_sum = ' + '.join(var_num) + f' <= {total_matrix_max_sum}' + '\n' + ' + '.join(var_num) + f' >= {total_matrix_min_sum}'
...     return constraint_matrix_sum.lstrip() + '\n'
... 
>>> def constraints_func3(column_sum_min_lst, column_sum_max_lst):
...     var_num = ['x'+str(i) for i in range(260)]
...     #creating column sum constraints
...     constraint_column_sum = ''
...     for i in range(5):
...         columns_var = var_num[i::5]
...         column_max_sum = ' + '.join(columns_var) + f' <= {column_sum_max_lst[i]}'
...         column_min_sum = ' + '.join(columns_var) + f' >= {column_sum_min_lst[i]}'
...         constraint_column_sum += '\n' + column_max_sum + '\n' + column_min_sum
...     constraint_column_sum += '\n'
...     return constraint_column_sum.lstrip()
... 
>>> 

I'll build the constraints now, but as there are some unnecessary constraints in eqn3, I'm going to get rid of them. Essentially, merge identifies that CON <= 0 and CON >= 0, together, don't apply any constraint.

>>> # build the constraints
>>> eqn0 = constraints_func0(element_vals_dict)
>>> eqn1 = constraints_func1(element_low_lim, element_hi_lim)
>>> eqn2 = constraints_func2(total_matrix_min_sum, total_matrix_max_sum)
>>> eqn3 = constraints_func3(column_sum_min_lst, column_sum_max_lst)
>>> 
>>> # remove unnecessary constraints
>>> eqn3 = '\n'.join(ms.merge(*eqn3.strip().split('\n'))) + '\n'
>>> 

Now, I need to isolate something other than x0, x1, or x2... so I could either go back and not include the above three variables in the relevant equations until after simplification, or I have to work a bit to isolate something else. I'll sloppily do the isolation however I can with what I have.

>>> # isolate non-fixed single variables, where relevant
>>> eqn3c = sorted(eqn3.strip().split('\n'))
>>> eqn3a = ms.simplify('\n'.join(eqn3c[:2])+'\n', all=True, target='x5') + '\n' 
>>> eqn3b = ms.simplify('\n'.join(eqn3c[2:4])+'\n', all=True, target='x6') + '\n'
>>> eqn3c = ms.simplify('\n'.join(eqn3c[4:])+'\n', all=True, target='x7') + '\n' 
>>> eqn2 = ms.simplify(eqn2, all=True, target='x3') + '\n'

We then combine, and generate the constraints.

# combine simplified equations and generate constraints
equations = eqn0 + eqn1 + eqn2 + eqn3a + eqn3b + eqn3c
constrain = ms.generate_constraint(ms.generate_solvers(equations), join=my.constraints.and_)

Set up the solver...

>>> def obj_func(x):
...     kx = constrain(x)
...     y =  sum(kx)
...     return y
... 
>>> mon = my.monitors.VerboseMonitor(1)
>>> obj_vals = []
>>> def callback(x):
...     kx = constrain(x)
...     y =  sum(kx)
...     mon(kx, y)
...     obj_vals.append(y)
... 
>>> # create a starting matrix
>>> start_matrix = [random.randint(0,3) for i in range(260)]

then solve...

>>> # run optimizer
>>> solution = so.minimize(obj_func, start_matrix, method='SLSQP', tol=0.01, options={'disp': True, 'maxiter':100}, callback=callback)
Generation 0 has ChiSquare: 783.0
Generation 1 has ChiSquare: 2073.8870967741877
Generation 2 has ChiSquare: 668.5887699219646
Generation 3 has ChiSquare: 856.5643183986956
Generation 4 has ChiSquare: 620.5068341441591
Generation 5 has ChiSquare: 785.2132279949118
Generation 6 has ChiSquare: 730.8870714406557
Generation 7 has ChiSquare: 655.2910093597624
Generation 8 has ChiSquare: 628.109917778625
Generation 9 has ChiSquare: 630.5176087171566
Generation 10 has ChiSquare: 605.725338759661
Generation 11 has ChiSquare: 610.5089572225905
Generation 12 has ChiSquare: 606.1689809653246
Generation 13 has ChiSquare: 600.0869446386264
Generation 14 has ChiSquare: 600.0
Optimization terminated successfully    (Exit mode 0)
            Current function value: 600.0
            Iterations: 15
            Function evaluations: 3933
            Gradient evaluations: 15

Then print the solution:

>>> df_solution = pd.DataFrame(np.array(mon.x[-1]).reshape(-1,5), columns=['Col1','Col2','Col3','Col4','Col5'])
>>> print(df_solution)
     Col1   Col2   Col3  Col4  Col5
0   100.0  200.0  300.0   0.0   0.0
1     0.0    0.0    0.0   0.0   0.0
2     0.0    0.0    0.0   0.0   0.0
3     0.0    0.0    0.0   0.0   0.0
4     0.0    0.0    0.0   0.0   0.0
5     0.0    0.0    0.0   0.0   0.0
6     0.0    0.0    0.0   0.0   0.0
7     0.0    0.0    0.0   0.0   0.0
8     0.0    0.0    0.0   0.0   0.0
9     0.0    0.0    0.0   0.0   0.0
10    0.0    0.0    0.0   0.0   0.0
11    0.0    0.0    0.0   0.0   0.0
12    0.0    0.0    0.0   0.0   0.0
13    0.0    0.0    0.0   0.0   0.0
14    0.0    0.0    0.0   0.0   0.0
15    0.0    0.0    0.0   0.0   0.0
16    0.0    0.0    0.0   0.0   0.0
17    0.0    0.0    0.0   0.0   0.0
18    0.0    0.0    0.0   0.0   0.0
19    0.0    0.0    0.0   0.0   0.0
20    0.0    0.0    0.0   0.0   0.0
21    0.0    0.0    0.0   0.0   0.0
22    0.0    0.0    0.0   0.0   0.0
23    0.0    0.0    0.0   0.0   0.0
24    0.0    0.0    0.0   0.0   0.0
25    0.0    0.0    0.0   0.0   0.0
26    0.0    0.0    0.0   0.0   0.0
27    0.0    0.0    0.0   0.0   0.0
28    0.0    0.0    0.0   0.0   0.0
29    0.0    0.0    0.0   0.0   0.0
30    0.0    0.0    0.0   0.0   0.0
31    0.0    0.0    0.0   0.0   0.0
32    0.0    0.0    0.0   0.0   0.0
33    0.0    0.0    0.0   0.0   0.0
34    0.0    0.0    0.0   0.0   0.0
35    0.0    0.0    0.0   0.0   0.0
36    0.0    0.0    0.0   0.0   0.0
37    0.0    0.0    0.0   0.0   0.0
38    0.0    0.0    0.0   0.0   0.0
39    0.0    0.0    0.0   0.0   0.0
40    0.0    0.0    0.0   0.0   0.0
41    0.0    0.0    0.0   0.0   0.0
42    0.0    0.0    0.0   0.0   0.0
43    0.0    0.0    0.0   0.0   0.0
44    0.0    0.0    0.0   0.0   0.0
45    0.0    0.0    0.0   0.0   0.0
46    0.0    0.0    0.0   0.0   0.0
47    0.0    0.0    0.0   0.0   0.0
48    0.0    0.0    0.0   0.0   0.0
49    0.0    0.0    0.0   0.0   0.0
50    0.0    0.0    0.0   0.0   0.0
51    0.0    0.0    0.0   0.0   0.0

Hopefully, that looks right.