Python: updating custom steps in SciPy basinhopping, and constrained minimization

I'm looking for the global minimum of a given function and trying to use its gradient (here effectively the Jacobian) to guide the step taking. However, my x is updated during the optimization, and so is my gradient. I am also trying to recover, as quickly as possible, the first x for which f(x) < 1, so I am using a constraint.

  • How can I update the input x and the Jacobian?
  • My f(x) < 1 constraint is not very effective, so is there an alternative way to enforce this requirement? (The closest alternative I've found so far is sketched right after this list.)
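
For the second point, a minimal sketch of the alternative I've found so far (the objective g is just a stand-in for my real f below): per the SciPy docs, basinhopping stops as soon as the callback returns True, so the f(x) < 1 test can act as a stop criterion instead of a constraint.

import numpy as np
from scipy import optimize

def g(x):
    # toy stand-in for the real objective f below
    return np.sum(np.asarray(x) ** 2)

def stop_below_one(x, fval, accept):
    # returning True terminates basinhopping immediately, so this
    # recovers the first x encountered with g(x) < 1
    return fval < 1

res = optimize.basinhopping(g, x0=[2.0, 2.0], niter=1000, callback=stop_below_one)
print(res.x, res.fun)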

This is my code (more or less):

import numpy as np
from scipy import optimize

class MyBounds(object):
    def __init__(self, xmax=[2*np.pi, 2*np.pi, 2*np.pi, 2*np.pi, 1.2, 1.2, 1.2, 1.2],
                 xmin=[0, 0, 0, 0, 0, 0, 0, 0]):
        self.xmax = np.array(xmax)
        self.xmin = np.array(xmin)

    def __call__(self, **kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= self.xmax))
        tmin = bool(np.all(x >= self.xmin))
        return tmax and tmin

class MyTakeStep(object):
    def __init__(self, stepsize=1):
        self.stepsize = stepsize

    def compute_step(self, jacobi_matrix, x, i):
        if jacobi_matrix[i] < 0:
            r = np.random.uniform(0, 2*np.pi - x[i])
        elif jacobi_matrix[i] > 0:
            r = np.random.uniform(0 - x[i], 0)
        else:
            r = 0
        return r

    def __call__(self, x):
        print("ENTERING CALL")
        print("THIS IS X:", x)
        jacobi_matrix = jacobian(x)
        print("x:", x)
        print("jacobi:", jacobi_matrix)
        x[0] += self.compute_step(jacobi_matrix, x, 0)
        x[1] += self.compute_step(jacobi_matrix, x, 1)
        x[2] += self.compute_step(jacobi_matrix, x, 2)
        x[3] += self.compute_step(jacobi_matrix, x, 3)
        x[4] += self.compute_step(jacobi_matrix, x, 4)
        x[5] += self.compute_step(jacobi_matrix, x, 5)
        x[6] += self.compute_step(jacobi_matrix, x, 6)
        x[7] += self.compute_step(jacobi_matrix, x, 7)
        print("newx:", x)
        return x

def f(x):
    # components of the objective function
    result = g1
    result += g2
    result += g3
    return result

def jacobian(x):
    print("input_list in jacobian:", x)

    # define full derivatives
    dG_dphi = dg1_dphi + dg2_dphi + dg3_dphi
    dG_dr = dg1_dr + dg2_dr + dg3_dr
    gradient = np.hstack((dG_dphi, dG_dr))

    print("G:", gradient.shape, gradient, "\n")
    return gradient

def callback(x, f, accept):
    print("x: %65s | f: %5s | accept: %5s" % (str([round(e, 3) for e in x]), str(round(f, 3)), accept))

def hopping_solver(min_f, min_x, input_excitation):
    # define bounds and custom step taking
    mybounds = MyBounds()
    mytakestep = MyTakeStep()
    comb = [np.deg2rad(phi) for phi in input_excitation[:4]] + input_excitation[4:]
    print("comb:", comb)
    min_f = 10
    tol = 0
    cons = {'type': 'ineq', 'fun': lambda x: 1 - f(x)}
    k = {"method": 'Nelder-Mead', 'constraints': cons, 'jac': jacobian, 'tol': tol}
    optimal_c = optimize.basinhopping(f,
                                      x0=comb,
                                      niter=1000000,
                                      T=8,
                                      stepsize=1,
                                      minimizer_kwargs=k,
                                      take_step=mytakestep,
                                      accept_test=mybounds,
                                      callback=callback,
                                      interval=100000,
                                      disp=True,
                                      niter_success=None)
    print(optimal_c)
    min_x, min_f = optimal_c['x'], optimal_c['fun']
    comb = min_x
    sol = np.array([np.rad2deg(phi) for phi in optimal_c['x'][:4]] + list(optimal_c['x'][4:]))
    min_x = sol
    return min_x, min_f
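
For the first point, this is the direction I'm experimenting with (a sketch only; JacobianTakeStep is a made-up name, and it assumes jacobian from above is in scope): compute the gradient once per step, keep it on the instance so it stays in sync with x, and update x in a loop instead of eight unrolled lines.

class JacobianTakeStep(object):
    # hypothetical variant of MyTakeStep: caches the latest gradient on
    # the instance and loops over the coordinates instead of unrolling
    def __init__(self, stepsize=1):
        self.stepsize = stepsize
        self.jacobi = None  # refreshed on every call, so it tracks x

    def __call__(self, x):
        self.jacobi = jacobian(x)  # jacobian() as defined above
        for i in range(len(x)):
            if self.jacobi[i] < 0:
                # negative derivative: push x[i] up, at most to 2*pi
                x[i] += np.random.uniform(0, 2*np.pi - x[i])
            elif self.jacobi[i] > 0:
                # positive derivative: push x[i] down, at most to 0
                x[i] += np.random.uniform(-x[i], 0)
        return x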

Any help is greatly appreciated, thanks in advance.