#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Sep  6 10:16:37 2018
@author: myhaspl
@email:myhaspl@myhaspl.com
二分法求解一元多次方程 (bisection method for solving a univariate polynomial equation)
"""

import tensorflow as tf

def f(x):
    """Evaluate the target polynomial 3*x**3 + 2*x**2 - 19.

    Works on plain Python numbers and on TF tensors (operator overloads).
    """
    return 3 * x ** 3 + 2 * x ** 2 - 19

def tRange(a, b, x, fa, fb, fx):
    """Bisection step when the root lies in [a, x].

    The bracket's right end moves to the old midpoint; a new midpoint
    and its function value are computed.  Returns the updated
    (a, b, x, fa, fb, fx) tuple.
    """
    right = x          # new right endpoint of the bracket
    f_right = fx       # f at the new right endpoint
    mid = tf.divide(tf.add(a, right), 2)
    return (a, right, mid, fa, f_right, f(mid))

def fRange(a, b, x, fa, fb, fx):
    """Bisection step when the root lies in [x, b].

    The bracket's left end moves to the old midpoint; a new midpoint
    and its function value are computed.  Returns the updated
    (a, b, x, fa, fb, fx) tuple.
    """
    left = x           # new left endpoint of the bracket
    f_left = fx        # f at the new left endpoint
    mid = tf.divide(tf.add(left, b), 2)
    return (left, b, mid, f_left, fb, f(mid))

def body(a,b,x,fa,fb,fx,i,n):
    """One iteration of the bisection loop (tf.while_loop body).

    tf.cond selects tRange when fa*fx < 0 (sign change between a and x,
    so the root is in [a, x]); otherwise fRange (root in [x, b]).
    Each loop variable is routed through tf.Print, which returns its
    first argument unchanged while logging the bracketed value as a
    side effect each iteration (TF1-era debugging op).  Note i+1 is
    the returned counter, though tf.Print logs the pre-increment i.
    """
    a,b,x,fa,fb,fx =tf.cond(tf.less(tf.multiply(fa,fx),0),lambda: tRange(a,b,x,fa,fb,fx),lambda: fRange(a,b,x,fa,fb,fx))
    return (tf.Print(a,[a],"a:"),tf.Print(b,[b],"b:"),tf.Print(x,[x],"x:"),tf.Print(fa,[fa],"fa:"),tf.Print(fb,[fb],"fb:"),tf.Print(fx,[fx],"fx:"),tf.Print(i+1,[i],"i:"),tf.Print(n,[n],"n:"))

def c(a, b, x, fa, fb, fx, i, n):
    """Loop condition for tf.while_loop: continue while all three hold.

    - the half-width (b - a)/2 is still larger than `tol` (module-level
      placeholder),
    - f(x) has not hit exactly zero, and
    - fewer than n iterations have run.
    """
    half_width_open = tf.greater(tf.divide(tf.subtract(b, a), 2), tol)
    not_exact_root = tf.not_equal(fx, 0)
    under_iter_cap = tf.less(i, n)
    return tf.logical_and(tf.logical_and(half_width_open, not_exact_root),
                          under_iter_cap)

a = tf.placeholder(tf.float32,shape=(),name="mya") 
b = tf.placeholder(tf.float32,shape=(),name="myb")
x = tf.placeholder(tf.float32,shape=(),name="myx")
tol= tf.placeholder(tf.float32,shape=(),name="mytol")
fa = tf.constant(0,dtype=tf.float32,name="myfa")
fb = tf.constant(0,dtype=tf.float32,name="myfb")
fx = tf.constant(0,dtype=tf.float32,name="myfx")
i = tf.constant(0,dtype=tf.int32,name="myi")
n = tf.constant(0,dtype=tf.int32,name="myi")

input_dict={a:-10,b:10,x:0,fa:f(-10),fb:f(10),fx:f(0),tol:1e-7,n:100}
res = tf.while_loop(c, body, loop_vars=[a,b,x,fa,fb,fx,i,n])

with tf.Session() as sess:    
    y=sess.run(res,feed_dict=input_dict)
    print y

# Sample run output (pasted transcript; commented out so the file parses).
# The loop converges to the real root x ≈ 1.6525245 after 27 iterations:
#
# ....
# ....
# n:[100]
# i:[20]
# x:[1.65252209]
# b:[1.65252686]a:[1.65251732]
# fx:[-7.43866e-05]fa:[-0.000221252441]fb:[7.2479248e-05]
#
# n:[100]
# i:[21]
# x:[1.65252447]
# b:[1.65252686]
# fa:[-7.43866e-05]fx:[-1.90734863e-06]a:[1.65252209]fb:[7.2479248e-05]
#
# i:[22]
# n:[100]
# a:[1.65252447]x:[1.65252566]b:[1.65252686]fx:[3.81469727e-05]
#
# fb:[7.2479248e-05]fa:[-1.90734863e-06]
#
# i:[23]n:[100]
#
# b:[1.65252566]x:[1.65252507]
# a:[1.65252447]
#
# fb:[3.81469727e-05]
# fa:[-1.90734863e-06]
# fx:[1.90734863e-05]
# n:[100]
# i:[24]
# x:[1.65252471]
# b:[1.65252507]
# a:[1.65252447]
# fb:[1.90734863e-05]
# fa:[-1.90734863e-06]
# fx:[7.62939453e-06]
# n:[100]
# x:[1.65252459]
# fx:[3.81469727e-06]i:[25]a:[1.65252447]b:[1.65252471]
#
# fa:[-1.90734863e-06]
# fb:[7.62939453e-06]
# n:[100]
# b:[1.65252459]
# x:[1.65252447]fb:[3.81469727e-06]i:[26]a:[1.65252447]
#
# fx:[-1.90734863e-06]fa:[-1.90734863e-06]
#
# (1.6525245, 1.6525246, 1.6525245, -1.9073486e-06, 3.8146973e-06, -1.9073486e-06, 27, 100)