Hi there, I still need help with this issue. Please help!
Here is the entire error message:
AttributeError Traceback (most recent call last)
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/demo_model.py in _demo_fun(self, *x)
557 self._set_x(x)
--> 558 return self._get_demo(None)
559 except Exception as e:
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/demo_model.py in _get_demo(self, sampled_n_dict)
540 events = sorted(events, key=lambda e: e.t(params_dict))
--> 541 G = _build_demo_graph(events, sampled_n_dict, params_dict, default_N=1.0)
542 demo = Demography(G)
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/events.py in _build_demo_graph(events, sample_sizes, params_dict, default_N)
20 for e in events:
---> 21 e.add_to_graph(_G, sample_sizes, params_dict)
22
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/events.py in add_to_graph(self, G, sample_sizes, params_dict)
145 G.node[newpop]['sizes'] = [
--> 146 {'t': t, 'N': G.graph['default_N'], 'growth_rate':None}]
147 G.graph['roots'][i] = newpop
AttributeError: 'DiGraph' object has no attribute 'node'
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
&lt;ipython-input&gt; in &lt;module&gt;
1 #Optimizing models modelZ
2 modelZ.set_data(sfs)
----> 3 modelZ.optimize(method="L-BFGS-B")
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/demo_model.py in optimize(self, method, jac, hess, hessp, printfreq, **kwargs)
950 jac=jac, hess=hess, hessp=hessp,
951 bounds=bounds, callback=callback,
--> 952 **kwargs)
953
954 self._set_x(res.x)
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in find_mle(self, x0, method, jac, hess, hessp, bounds, callback, **kwargs)
267 return _find_minimum(fun, x0, scipy.optimize.minimize,
268 bounds=bounds, callback=callback,
--> 269 opt_kwargs=opt_kwargs, gradmakers=gradmakers, replacefun=replacefun)
270
271
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/optimizers.py in _find_minimum(f, start_params, optimizer, bounds, callback, opt_kwargs, **kwargs)
87
88 ret = _find_minimum_helper(
---> 89 f, start_params, optimizer, opt_kwargs, **kwargs)
90 if fixed_params:
91 ret.x = get_x(ret.x)
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/optimizers.py in _find_minimum_helper(f, start_params, optimizer, opt_kwargs, gradmakers, replacefun)
101 if replacefun:
102 f = replacefun(f)
--> 103 return optimizer(f, start_params, **opt_kwargs)
104
105 stochastic_opts = {}
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/_minimize.py in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options)
616 elif meth == 'l-bfgs-b':
617 return _minimize_lbfgsb(fun, x0, args, jac, bounds,
--> 618 callback=callback, **options)
619 elif meth == 'tnc':
620 return _minimize_tnc(fun, x0, args, jac, bounds, callback=callback,
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py in _minimize_lbfgsb(fun, x0, args, jac, bounds, disp, maxcor, ftol, gtol, eps, maxfun, maxiter, iprint, callback, maxls, finite_diff_rel_step, **unknown_options)
306 sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps,
307 bounds=new_bounds,
--> 308 finite_diff_rel_step=finite_diff_rel_step)
309
310 func_and_grad = sf.fun_and_grad
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/optimize.py in _prepare_scalar_function(fun, x0, jac, args, bounds, epsilon, finite_diff_rel_step, hess)
260 # calculation reduces overall function evaluations.
261 sf = ScalarFunction(fun, x0, args, grad, hess,
--> 262 finite_diff_rel_step, bounds, epsilon=epsilon)
263
264 return sf
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/_differentiable_functions.py in __init__(self, fun, x0, args, grad, hess, finite_diff_rel_step, finite_diff_bounds, epsilon)
74
75 self._update_fun_impl = update_fun
---> 76 self._update_fun()
77
78 # Gradient evaluation
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/_differentiable_functions.py in _update_fun(self)
164 def _update_fun(self):
165 if not self.f_updated:
--> 166 self._update_fun_impl()
167 self.f_updated = True
168
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/_differentiable_functions.py in update_fun()
71
72 def update_fun():
---> 73 self.f = fun_wrapped(self.x)
74
75 self._update_fun_impl = update_fun
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/_differentiable_functions.py in fun_wrapped(x)
68 def fun_wrapped(x):
69 self.nfev += 1
---> 70 return fun(x, *args)
71
72 def update_fun():
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/optimize.py in __call__(self, x, *args)
72 def __call__(self, x, *args):
73 """ returns the the function value """
---> 74 self._compute_if_needed(x, *args)
75 return self._value
76
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/scipy/optimize/optimize.py in _compute_if_needed(self, x, *args)
66 if not np.all(x == self.x) or self._value is None or self.jac is None:
67 self.x = np.asarray(x).copy()
---> 68 fg = self.fun(x, *args)
69 self.jac = fg[1]
70 self._value = fg[0]
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/autograd/wrap_util.py in nary_f(*args, **kwargs)
18 else:
19 x = tuple(args[i] for i in argnum)
---> 20 return unary_operator(unary_f, x, *nary_op_args, **nary_op_kwargs)
21 return nary_f
22 return nary_operator
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/autograd/differential_operators.py in value_and_grad(fun, x)
133 """Returns a function that returns both value and gradient. Suitable for use
134 in scipy.optimize"""
--> 135 vjp, ans = _make_vjp(fun, x)
136 if not vspace(ans).size == 1:
137 raise TypeError("value_and_grad only applies to real scalar-output "
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/autograd/core.py in make_vjp(fun, x)
8 def make_vjp(fun, x):
9 start_node = VJPNode.new_root()
---> 10 end_value, end_node = trace(start_node, fun, x)
11 if end_node is None:
12 def vjp(g): return vspace(x).zeros()
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/autograd/tracer.py in trace(start_node, fun, x)
8 with trace_stack.new_trace() as t:
9 start_box = new_box(x, t, start_node)
---> 10 end_box = fun(start_box)
11 if isbox(end_box) and end_box._trace == start_box._trace:
12 return end_box._value, end_box._node
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/autograd/wrap_util.py in unary_f(x)
13 else:
14 subargs = subvals(args, zip(argnum, x))
---> 15 return fun(*subargs, **kwargs)
16 if isinstance(argnum, int):
17 x = args[argnum]
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in fun(x)
261 @functools.wraps(self.kl_div)
262 def fun(x):
--> 263 ret = self.kl_div(x)
264 hist.recent_vals += [(x, ret)]
265 return ret
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in kl_div(self, x)
173 Returns KL-Divergence(Empirical || Theoretical(x)).
174 """
--> 175 log_lik = self.log_lik(x)
176 #ret = -log_lik + self.sfs.n_snps() * self.sfs._entropy + _entropy_mut_term(self.mut_rate, self.sfs, self.p_missing, self.use_pairwise_diffs)
177 ret = -log_lik + self.sfs.n_snps() * self.sfs._entropy
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in log_lik(self, x, vector)
98 Returns the composite log-likelihood of the data at the point x.
99 """
--> 100 ret = self._log_lik(x, vector=vector)
101 logger.debug("log-likelihood = {0}".format(ret))
102 return ret
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in _log_lik(self, x, vector)
120
121 def _log_lik(self, x, vector):
--> 122 demo = self._get_multipop_moran(x)
123 ret = self._get_multinom_loglik(demo, vector=vector) + self._mut_factor(demo, vector=vector)
124 if vector:
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/likelihood.py in _get_multipop_moran(self, x)
132 logger.debug(
133 "Computing log-likelihood at x = {0}".format(str(x).replace('\n', '')))
--> 134 demo = self.demo_func(*x)
135 else:
136 demo = x
~/miniconda3/envs/momi-py36/lib/python3.6/site-packages/momi/demo_model.py in _demo_fun(self, *x)
565
566 new_msg = "Exception encountered at parameter values {} (internal scaling x = {})".format(curr_params, curr_scaled)
--> 567 raise ValueError(new_msg) from e
568 finally:
569 self._set_x(prev_x)
ValueError: Exception encountered at parameter values {'n_MB': 'Autograd ArrayBox with value 562063.2341561384', 'n_VG': 'Autograd ArrayBox with value 297409.5086782989', 'tdiv': 'Autograd ArrayBox with value 1307458.9891879773'} (internal scaling x = {'n_MB': 'Autograd ArrayBox with value 13.239369638838406', 'n_VG': 'Autograd ArrayBox with value 12.602865285188061', 'tdiv': 'Autograd ArrayBox with value -3.9653866120615224'})
Hi there, I still need help with this issue. Please help!
Originally posted by @tobitliyandja in #79