Merge /home/vsc/yap

Vítor Santos Costa 2018-09-13 17:08:12 +01:00
commit b64853fd1a
11 changed files with 456 additions and 487 deletions

View File

@ -1,21 +1,19 @@
/**************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: arrays.c *
* Last rev: *
* mods: *
* comments: Array Manipulation Routines *
* *
*************************************************************************/
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: arrays.c * Last rev:
** mods: * comments: Array Manipulation Routines *
* *
*************************************************************************/
/**
@file arrays.c
@ -106,9 +104,9 @@ The following predicates manipulate arrays:
*/
#include "Yap.h"
#include "YapEval.h"
#include "Yatom.h"
#include "clause.h"
#include "YapEval.h"
#include "heapgc.h"
#if HAVE_ERRNO_H
#include <errno.h>
@ -373,7 +371,7 @@ static ArrayEntry *GetArrayEntry(Atom at, int owner) {
#if THREADS
&& pp->owner_id != worker_id
#endif
)
pp = RepArrayProp(pp->NextOfPE);
READ_UNLOCK(ae->ARWLock);
return pp;
@ -986,7 +984,7 @@ restart:
#if THREADS
&& ((ArrayEntry *)pp)->owner_id != worker_id
#endif
)
pp = RepProp(pp->NextOfPE);
if (EndOfPAEntr(pp)) {
if (HR + 1 + size > ASP - 1024) {
@ -1025,24 +1023,24 @@ restart:
return (FALSE);
}
#define CREATE_ARRAY_DEFS() \
PAR("type", isatom, CREATE_ARRAY_TYPE), \
PAR("address", filler, CREATE_ARRAY_ADDRESS), \
PAR("int", filler, CREATE_ARRAY_INT), \
PAR("dbref", filler, CREATE_ARRAY_DBREF), \
PAR("float", filler, CREATE_ARRAY_FLOAT), \
PAR("ptr", filler, CREATE_ARRAY_PTR), \
PAR("atom", filler, CREATE_ARRAY_ATOM), \
PAR("char", filler, CREATE_ARRAY_CHAR), \
PAR("unsigned_char", filler, CREATE_UNSIGNED_CHAR), \
PAR("term", filler, CREATE_ARRAY_TERM), \
PAR("nb_term", filler, CREATE_ARRAY_NB_TERM)
#define CREATE_ARRAY_DEFS() \
PAR("type", isatom, CREATE_ARRAY_TYPE), \
PAR("address", filler, CREATE_ARRAY_ADDRESS), \
PAR("int", filler, CREATE_ARRAY_INT), \
PAR("dbref", filler, CREATE_ARRAY_DBREF), \
PAR("float", filler, CREATE_ARRAY_FLOAT), \
PAR("ptr", filler, CREATE_ARRAY_PTR), \
PAR("atom", filler, CREATE_ARRAY_ATOM), \
PAR("char", filler, CREATE_ARRAY_CHAR), \
PAR("unsigned_char", filler, CREATE_ARRAY_UNSIGNED_CHAR), \
PAR("term", filler, CREATE_ARRAY_TERM), \
PAR("nb_term", filler, CREATE_ARRAY_NB_TERM)
#define PAR(x, y, z) z
typedef enum create_array_enum_choices { CREATE_ARRAY_DEFS() } create_array_choices_t;
typedef enum create_array_enum_choices {
CREATE_ARRAY_DEFS()
} create_array_choices_t;
#undef PAR
@ -1052,24 +1050,22 @@ typedef enum create_array_enum_choices { CREATE_ARRAY_DEFS() } create_array_choi
static const param_t create_array_defs[] = {CREATE_ARRAY_DEFS()};
#undef PAR
/* create an array (+Name, + Size, +Props) */
/** @pred static_array(+ _Name_, + _Size_, + _Type_)

Create a new static array with name _Name_. Note that the _Name_
must be an atom (named array). The _Size_ must evaluate to an
integer. The _Type_ must be bound to one of the types mentioned
previously.
*/
static Int create_static_array(USES_REGS1) {
Term ti = Deref(ARG2);
Term t = Deref(ARG1);
Term tprops = Deref(ARG3);
Int size;
static_array_types props;
void *address = NULL;
if (IsVarTerm(ti)) {
Yap_Error(INSTANTIATION_ERROR, ti, "create static array");
@ -1084,42 +1080,62 @@ static Int
return (FALSE);
}
}
if (IsVarTerm(tprops)) {
Yap_Error(INSTANTIATION_ERROR, tprops, "create static array");
return (FALSE);
} else if (IsAtomTerm(tprops)) {
char *atname = (char *)RepAtom(AtomOfTerm(tprops))->StrOfAE;
if (!strcmp(atname, "int"))
props = array_of_ints;
else if (!strcmp(atname, "dbref"))
props = array_of_dbrefs;
else if (!strcmp(atname, "float"))
props = array_of_doubles;
else if (!strcmp(atname, "ptr"))
props = array_of_ptrs;
else if (!strcmp(atname, "atom"))
props = array_of_atoms;
else if (!strcmp(atname, "char"))
props = array_of_chars;
else if (!strcmp(atname, "unsigned_char"))
props = array_of_uchars;
else if (!strcmp(atname, "term"))
props = array_of_terms;
else if (!strcmp(atname, "nb_term"))
props = array_of_nb_terms;
else {
Yap_Error(DOMAIN_ERROR_ARRAY_TYPE, tprops, "create static array");
return (FALSE);
xarg *args =
Yap_ArgListToVector(tprops, create_array_defs, CREATE_ARRAY_NB_TERM,
DOMAIN_ERROR_CREATE_ARRAY_OPTION);
if (args == NULL) {
if (LOCAL_Error_TYPE != YAP_NO_ERROR) {
Yap_Error(LOCAL_Error_TYPE, tprops, NULL);
}
} else {
Yap_Error(TYPE_ERROR_ATOM, tprops, "create static array");
return (FALSE);
return false;
}
if (args[CREATE_ARRAY_TYPE].used) {
tprops = args[CREATE_ARRAY_TYPE].tvalue;
{
char *atname = (char *)RepAtom(AtomOfTerm(tprops))->StrOfAE;
if (!strcmp(atname, "int"))
props = array_of_ints;
else if (!strcmp(atname, "dbref"))
props = array_of_dbrefs;
else if (!strcmp(atname, "float"))
props = array_of_doubles;
else if (!strcmp(atname, "ptr"))
props = array_of_ptrs;
else if (!strcmp(atname, "atom"))
props = array_of_atoms;
else if (!strcmp(atname, "char"))
props = array_of_chars;
else if (!strcmp(atname, "unsigned_char"))
props = array_of_uchars;
else if (!strcmp(atname, "term"))
props = array_of_terms;
else if (!strcmp(atname, "nb_term"))
props = array_of_nb_terms;
}
}
if (args[CREATE_ARRAY_ADDRESS].used) {
address = AddressOfTerm(args[CREATE_ARRAY_ADDRESS].tvalue);
}
if (args[CREATE_ARRAY_INT].used)
props = array_of_ints;
if (args[CREATE_ARRAY_DBREF].used)
props = array_of_dbrefs;
if (args[CREATE_ARRAY_FLOAT].used)
props = array_of_doubles;
if (args[CREATE_ARRAY_PTR].used)
props = array_of_ptrs;
if (args[CREATE_ARRAY_ATOM].used)
props = array_of_atoms;
if (args[CREATE_ARRAY_CHAR].used)
props = array_of_chars;
if (args[CREATE_ARRAY_UNSIGNED_CHAR].used)
props = array_of_uchars;
if (args[CREATE_ARRAY_TERM].used)
props = array_of_terms;
if (args[CREATE_ARRAY_NB_TERM].used)
props = array_of_nb_terms;
StaticArrayEntry *pp;
if (IsVarTerm(t)) {
Yap_Error(INSTANTIATION_ERROR, t, "create static array");
return (FALSE);
@ -1135,9 +1151,9 @@ xarg *args =
app = (ArrayEntry *)pp;
if (EndOfPAEntr(pp) || pp->ValueOfVE.ints == NULL) {
pp = CreateStaticArray(ae, size, props, NULL, pp PASS_REGS);
pp = CreateStaticArray(ae, size, props, address, pp PASS_REGS);
if (pp == NULL || pp->ValueOfVE.ints == NULL) {
return TRUE;
}
} else if (ArrayIsDynamic(app)) {
if (IsVarTerm(app->ValueOfVE) && IsUnboundVar(&app->ValueOfVE)) {
@ -1146,24 +1162,25 @@ xarg *args =
Yap_Error(PERMISSION_ERROR_CREATE_ARRAY, t,
"cannot create static array over dynamic array");
}
} else {
if (pp->ArrayType != props) {
Yap_Error(TYPE_ERROR_ATOM, t, "create static array %d/%d %d/%d",
          pp->ArrayEArity, size, pp->ArrayType, props);
pp = NULL;
} else {
AllocateStaticArraySpace(pp, props, pp->ValueOfVE.ints, size PASS_REGS);
}
}
WRITE_UNLOCK(ae->ARWLock);
if (!pp) {
return false;
}
return true;
}
return false;
}
/// create a new vector with name Name. If one exists, destroy the
/// preexisting one
StaticArrayEntry *Yap_StaticVector(Atom Name, size_t size,
static_array_types props) {

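For reference, a minimal usage sketch of static_array/3 as documented above. The atom form is the documented one; the bracketed option-list form is an assumption suggested by the new Yap_ArgListToVector parsing in this hunk, not an API this commit documents.

~~~~
% documented form: the type is one of the atoms listed earlier
:- static_array(counts, 10, int).

% option-list form hinted at by the new argument parser (assumption):
% :- static_array(counts, 10, [int]).
~~~~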
View File

@ -86,7 +86,7 @@ static bool setErr(const char *q, yap_error_descriptor_t *i, Term t) {
return false;
}
#define query_key_b(k, ks, q, i) \
if (strcmp(ks, q) == 0) { \
return i->k ? TermTrue : TermFalse; \
}

View File

@ -782,7 +782,7 @@ option(WITH_LBFGS
"interface with lbfgs" ON)
if (WITH_LBFGS)
# add_subDIRECTORY(packages/yap-lbfgs)
add_subDIRECTORY(packages/yap-lbfgs)
endif ()

View File

@ -745,6 +745,11 @@ rhs(list(RHS), List) :- !,
rhs(lists(RHS), List) :- !,
rhs(RHS, X1),
matrix_to_lists( X1, List ).
rhs('[]'([Args], floats(RHS)), Val) :-
integer(RHS),
integer(Args),
!,
get_float_from_address(RHS,Args,Val).
rhs('[]'(Args, RHS), Val) :-
!,
rhs(RHS, X1),
@ -788,6 +793,11 @@ rhs(S, NS) :-
set_lhs(V, R) :- var(V), !, V = R.
set_lhs(V, R) :- number(V), !, V = R.
set_lhs('[]'([Args], floats(RHS)), Val) :-
!,
integer(RHS),
integer(Args),
set_float_from_address(RHS,Args,Val).
set_lhs('[]'(Args, M), Val) :-
matrix_dims( M, Dims, Bases),
maplist( index(Range), Args, Dims, Bases, NArgs),

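A minimal sketch of what the two new clauses enable at the user level, assuming F is bound to a floats(Addr) term wrapping a raw float vector address (the representation built by the C code in the next file):

~~~~
% read, update, and write back one cell of F = floats(Addr)
bump(F, I, Delta) :-
    V0 <== F[I],      % new rhs/2 clause: get_float_from_address/3
    V is V0 + Delta,
    F[I] <== V.       % new set_lhs/2 clause: set_float_from_address/3
~~~~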
View File

@ -3244,6 +3244,30 @@ is_matrix(void)
return TRUE;
}
static YAP_Bool
get_float_from_address(void)
{
YAP_Float *fp = (YAP_Float *)YAP_IntOfTerm(YAP_ARG1);
YAP_Int off = YAP_IntOfTerm(YAP_ARG2);
return YAP_Unify(YAP_ARG3, YAP_MkFloatTerm(fp[off]));
}
static YAP_Bool
set_float_from_address(void)
{
YAP_Float *fp = (YAP_Float *)YAP_IntOfTerm(YAP_ARG1);
YAP_Int off = YAP_IntOfTerm(YAP_ARG2);
YAP_Float f = YAP_FloatOfTerm(YAP_ARG3 );
fp[off] = f;
return true;
}
X_API void init_matrix( void );
X_API void
@ -3302,7 +3326,9 @@ init_matrix(void)
YAP_UserCPredicate("do_matrix_op_to_cols", matrix_op_to_cols, 4);
YAP_UserCPredicate("matrix_m", matrix_m, 2);
YAP_UserCPredicate("matrix", is_matrix, 1);
}
YAP_UserCPredicate("get_float_from_address",get_float_from_address , 3);
YAP_UserCPredicate("set_float_from_address",set_float_from_address , 3);
}
#ifdef _WIN32

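The two predicates registered above take a raw integer address, an offset, and a value, and do no bounds checking, so they are only safe on addresses produced by YAP itself (for instance the floats(Addr) wrappers used by the lbfgs glue). A small sketch of direct use:

~~~~
% swap two cells of the float vector at integer address Addr
swap_cells(Addr, I, J) :-
    get_float_from_address(Addr, I, Vi),
    get_float_from_address(Addr, J, Vj),
    set_float_from_address(Addr, I, Vj),
    set_float_from_address(Addr, J, Vi).
~~~~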
View File

@ -493,26 +493,37 @@ init_learning :-
format_learning(1,'Initializing everything~n',[]),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Delete the BDDs from the previous run if they should
% not be reused
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
(
(
problog_flag(reuse_initialized_bdds,true),
problog_flag(rebuild_bdds,0)
)
->
true;
empty_bdd_directory
),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Check, if continuous facts are used.
% if yes, switch to problog_exact
% continuous facts are not supported yet.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
set_default_gradient_method,
( problog_flag(continuous_facts, true )
->
problog_flag(init_method,(_,_,_,_,OldCall)),
(
(
continuous_fact(_),
OldCall\=problog_exact_save(_,_,_,_,_)
)
->
(
format_learning(2,'Theory uses continuous facts.~nWill use problog_exact/3 as initialization method.~2n',[]),
set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile)))
);
true
)
;
problog_tabled(_)
->
(
format_learning(2,'Theory uses tabling.~nWill use problog_exact/3 as initialization method.~2n',[]),
set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile)))
);
true
),
succeeds_n_times(user:test_example(_,_,_,_),TestExampleCount),
format_learning(3,'~q test examples~n',[TestExampleCount]),
@ -610,8 +621,22 @@ init_one_query(QueryID,Query,Type) :-
(
recorded(QueryID, _, _)
->
format_learning(3,' Reuse existing BDD ~q~n~n',[QueryID]);
(
format_learning(3,' Reuse existing BDD ~q~n~n',[QueryID])
;
b_setval(problog_required_keep_ground_ids,false),
problog_flag(init_method,(Query,N,Bdd,graph2bdd(X,Y,N,Bdd))),
Query =.. [_,X,Y]
->
Bdd = bdd(Dir, Tree, MapList),
% trace,
graph2bdd(X,Y,N,Bdd),
rb_new(H0),
maplist_to_hash(MapList, H0, Hash),
Tree \= [],
writeln(QueryID),
tree_to_grad(Tree, Hash, [], Grad),
recordz(QueryID,bdd(Dir, Grad, MapList),_)
;
b_setval(problog_required_keep_ground_ids,false),
rb_new(H0),
problog_flag(init_method,(Query,NOf,Bdd,problog_kbest_as_bdd(Call,1,Bdd))),
@ -1016,9 +1041,19 @@ save_old_probabilities :-
)
).
save_old_probabilities :-
forall(tunable_fact(FactID,_),
(
get_fact_probability(FactID,OldProbability),
atomic_concat(['old_prob_',FactID],Key),
bb_put(Key,OldProbability)
)
).
forget_old_probabilities :-
problog_flag(continuous_facts, true),
!,
forall(tunable_fact(FactID,_),
(
continuous_fact(FactID)
@ -1042,7 +1077,20 @@ forget_old_probabilities :-
)
).
forget_old_probabilities :-
forall(tunable_fact(FactID,_),
(
atomic_concat(['old_prob_',FactID],Key),
atomic_concat(['grad_',FactID],Key2),
bb_delete(Key,_),
bb_delete(Key2,_)
)
).
add_gradient(Learning_Rate) :-
problog_flag(continuous_facts, true),
!,
forall(tunable_fact(FactID,_),
(
continuous_fact(FactID)
@ -1082,6 +1130,26 @@ add_gradient(Learning_Rate) :-
)
),
retractall(values_correct).
add_gradient(Learning_Rate) :-
forall(tunable_fact(FactID,_),
(
atomic_concat(['old_prob_',FactID],Key),
atomic_concat(['grad_',FactID],Key2),
bb_get(Key,OldProbability),
bb_get(Key2,GradValue),
inv_sigmoid(OldProbability,OldValue),
%writeln(FactID:OldValue +Learning_Rate*GradValue),
NewValue is OldValue +Learning_Rate*GradValue,
sigmoid(NewValue,NewProbability),
% Prevent "inf" by avoiding values too close to 0.0 or 1.0
Prob_Secure is min(0.999999999,max(0.000000001,NewProbability)),
set_fact_probability(FactID,Prob_Secure)
)
),
retractall(values_correct).
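The clause above takes the gradient step in sigmoid space: each probability is mapped to an unconstrained value, moved along the gradient, and mapped back, which keeps the result inside (0,1). A sketch of the two mappings, assuming the default sigmoid_slope of 1.0 (ProbLog's own definitions take the slope flag into account):

~~~~
sigmoid(X, S) :-         % logistic function, slope 1.0
    S is 1 / (1 + exp(-X)).
inv_sigmoid(P, X) :-     % its inverse, the logit
    X is log(P / (1 - P)).
~~~~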
% vsc: avoid silly search
@ -1496,17 +1564,17 @@ init_flags :-
problog_define_flag(init_method,problog_flag_validate_dummy,'ProbLog predicate to search proofs',(Query,Tree,problog:problog_kbest_as_bdd(Query,100,Tree)),learning_general,flags:learning_libdd_init_handler),
problog_define_flag(alpha,problog_flag_validate_number,'weight of negative examples (auto=n_p/n_n)',auto,learning_general,flags:auto_handler),
problog_define_flag(sigmoid_slope,problog_flag_validate_posnumber,'slope of sigmoid function',1.0,learning_general),
problog_define_flag(continuous_facts,problog_flag_validate_boolean,'support parameter learning of continuous distributions',true,learning_general),
% problog_define_flag(continuous_facts,problog_flag_validate_boolean,'support parameter learning of continuous distributions',1.0,learning_general),
problog_define_flag(learning_rate,problog_flag_validate_posnumber,'Default learning rate (If line_search=false)',examples,learning_line_search,flags:examples_handler),
problog_define_flag(line_search, problog_flag_validate_boolean,'estimate learning rate by line search',false,learning_line_search),
problog_define_flag(line_search_never_stop, problog_flag_validate_boolean,'make tiny step if line search returns 0',true,learning_line_search),
problog_define_flag(line_search_tau, problog_flag_validate_indomain_0_1_open,'tau value for line search',0.618033988749,learning_line_search),
writeln(1),
problog_define_flag(line_search_tolerance,problog_flag_validate_posnumber,'tolerance value for line search',0.05,learning_line_search),
problog_define_flag(line_search_interval, problog_flag_validate_dummy,'interval for line search',(0,100),learning_line_search,flags:linesearch_interval_handler).
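The init_method flag holds a template whose last argument is the goal actually run; the clauses earlier in this file swap in problog_exact_save/5 when continuous facts or tabling are detected. A sketch of setting it by hand, mirroring those calls:

~~~~
:- set_problog_flag(init_method,
       (Query, Prob, BDDFile, ProbFile,
        problog_exact_save(Query, Prob, _Status, BDDFile, ProbFile))).
~~~~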
init_logger :-
logger_define_variable(iteration, int),
logger_define_variable(duration,time),
logger_define_variable(mse_trainingset,float),
logger_define_variable(mse_min_trainingset,float),

View File

@ -20,18 +20,20 @@
:- use_module(library(lbfgs)).
:- use_module(library(matrix)).
% This is the call back function which evaluates F and the gradient of F
evaluate(FX,_N,_Step, Args) :-
X0 <== Args[0],
evaluate(FX,X,G,_N,_Step) :-
X0 <== X[0],
FX is sin(X0),
G0 is cos(X0),
Args[0] <== G0.
G[0] <== G0.
% This is the call back function which is invoked to report the progress
% if the last argument is set to anything else than 0, the optimizer will
% stop right now
progress(FX,X_Norm,G_Norm,Step,_N,Iteration,Ls,Args, 0) :-
X0 <== Args[0],
progress(FX,X,G,X_Norm,G_Norm,Step,_N,Iteration,Ls, 0) :-
X0 <== X[0],
format('~d. Iteration : x0=~4f f(X)=~4f |X|=~4f |X\'|=~4f Step=~4f Ls=~4f~n',
[Iteration,X0,FX,X_Norm,G_Norm,Step,Ls]).
@ -39,16 +41,16 @@ progress(FX,X_Norm,G_Norm,Step,_N,Iteration,Ls,Args, 0) :-
demo :-
format('Optimizing the function f(x0) = sin(x0)~n',[]),
optimizer_initialize(1,evaluate,progress),
optimizer_initialize(1,X,Status),
StartX is random*10,
format('We start the search at the random position x0=~5f~2n',[StartX]),
optimizer_set_x(0,StartX),
X[0] <== StartX,
optimizer_run(BestF,Status),
optimizer_get_x(0,BestX0),
optimizer_finalize,
format('~2nOptimization done~nWe found a minimum at f(~f)=~f~2nLBFGS Status=~w~n',[BestX0,BestF,Status]).
optimizer_run(Status, BestF, BestX0, O),
BestX0 <== X[0],
optimizer_finalize(Status),
format('~2nOptimization done~nWe found a minimum at f(~f)=~f~2nLBFGS Status=~w~n',[BestX0,BestF,O]).

View File

@ -18,45 +18,46 @@
% along with YAP-LBFGS. If not, see <http://www.gnu.org/licenses/>.
:- use_module(library(lbfgs)).
:- use_module(library(matrix)).
% This is the call back function which evaluates F and the gradient of F
evaluate(FX,_N,_Step) :-
optimizer_get_x(0,X0),
optimizer_get_x(1,X1),
evaluate(FX,X,G,_N,_Step) :-
X0 <== X[0],
X1 <== X[1],
FX is (X0-2)*(X0-2) + (X1-1)*(X1-1),
G0 is 2*(X0-2),
G1 is 2*(X1-1),
optimizer_set_g(0,G0),
optimizer_set_g(1,G1).
G1 is 2*(X1-1),
G[0] <== G0,
G[1] <== G1.
% This is the call back function which is invoked to report the progress
% if the last argument is set to anywhting else than 0, the optimizer will
% if the last argument is set to anything else than 0, the optimizer will
% stop right now
progress(FX,X_Norm,G_Norm,Step,_N,Iteration,Ls,0) :-
optimizer_get_x(0,X0),
optimizer_get_x(1,X1),
progress(FX,X,_G,X_Norm,G_Norm,Step,_N,Iteration,Ls,0) :-
X0 <== X[0],
X1 <== X[1],
format('~d. Iteration : (x0,x1)=(~4f,~4f) f(X)=~4f |X|=~4f |X\'|=~4f Step=~4f Ls=~4f~n',[Iteration,X0,X1,FX,X_Norm,G_Norm,Step,Ls]).
demo :-
format('Optimizing the function f(x0,x1) = (x0-2)^2 + (x1-1)^2~n',[]),
optimizer_initialize(2,evaluate,progress),
optimizer_initialize(2,X),
StartX0 is random*1000-500,
StartX1 is random*1000-500,
format('We start the search at the random position (x0,x1)=(~5f,~5f)~2n',[StartX0,StartX1]),
optimizer_set_x(0,StartX0),
optimizer_set_x(1,StartX1),
X[0] <== StartX0,
X[1] <== StartX1,
optimizer_run(BestF,Status),
optimizer_get_x(0,BestX0),
optimizer_get_x(1,BestX1),
BestX0 <== X[0],
BestX1 <== X[1],
optimizer_finalize,
format('~2nOptimization done~nWe found a minimum at f(~f,~f)=~f~2nLBFGS Status=~w~n',[BestX0,BestX1,BestF,Status]).

View File

@ -21,19 +21,13 @@
:- module(lbfgs,[optimizer_initialize/3,
optimizer_initialize/4,
optimizer_run/2,
optimizer_get_x/2,
optimizer_set_x/2,
optimizer_run/4,
optimizer_get_g/2,
optimizer_set_g/2,
optimizer_finalize/1,
optimizer_finalize/0,
optimizer_set_parameter/2,
optimizer_get_parameter/2,
optimizer_parameters/0]).
optimizer_set_parameter/3,
optimizer_get_parameter/3,
optimizer_parameters/1]).
% switch on all the checks to reduce bug searching time
% :- yap_flag(unknown,error).
@ -156,57 +150,47 @@ yes
:- load_foreign_files(['libLBFGS'],[],'init_lbfgs_predicates').
/** @pred optimizer_initialize(+N,+Evaluate,+Progress)
The same as before, except that the user module is the default
value.
Example
~~~~
optimizer_initialize(1,evaluate,progress)
optimizer_initialize(1)
~~~~
*/
optimizer_initialize(N,Call_Evaluate,Call_Progress) :-
optimizer_initialize(N,user,Call_Evaluate,Call_Progress).
optimizer_initialize(N,Module,Call_Evaluate,Call_Progress) :-
optimizer_finalize,
!,
optimizer_initialize(N,Module,Call_Evaluate,Call_Progress).
optimizer_initialize(N,Module,Call_Evaluate,Call_Progress) :-
optimizer_initialize(N,X,t(N,X,XO,Params)) :-
\+ initialized,
integer(N),
N>0,
% check whether there are such call back functions
current_module(Module),
current_predicate(Module:Call_Evaluate/3),
current_predicate(Module:Call_Progress/8),
optimizer_reserve_memory(N),
optimizer_reserve_memory(N,X,XO,Params),
% install call back predicates in the user module which call
% the predicates given by the arguments
EvalGoal =.. [Call_Evaluate,E1,E2,E3],
ProgressGoal =.. [Call_Progress,P1,P2,P3,P4,P5,P6,P7,P8],
retractall( lbfgs_callback_evaluate(_E1,_E2,_E3) ),
retractall( lbfgs_callback_progress(_P1,_P2,_P3,_P4,_P5,_P6,_P7,_P8) ),
assert( ( lbfgs_callback_evaluate(E1,E2,E3) :- Module:EvalGoal, !) ),
assert( ( lbfgs_callback_progress(P1,P2,P3,P4,P5,P6,P7,P8) :- Module:ProgressGoal, !) ),
assert(initialized).
/** @pred optimizer_finalize/1
Clean up the memory.
*/
optimizer_finalize :-
optimizer_finalize(t(N,X,XO,Params)) :-
initialized,
optimizer_free_memory,
retractall(user:'$lbfgs_callback_evaluate'(_,_,_)),
retractall(user:'$lbfgs_callback_progress'(_,_,_,_,_,_,_,_)),
optimizer_free_memory(X,XO,Params) ,
retractall(initialized).
/** @pred optimizer_run/4
Do the work.
*/
optimizer_run(t(N,X,XO,Params),FX,XO,Status) :-
optimizer_run(N,X, FX, XO, Status, Params).
/** @pred optimizer_parameters/0
/** @pred optimizer_parameters/1
Prints a table with the current parameters. See the <a href="http://www.chokkan.org/software/liblbfgs/structlbfgs__parameter__t.html#_details">documentation
of libLBFGS</a> for the meaning of each parameter.
@ -233,41 +217,41 @@ int orthantwise_end -1 End index for computing the L1 norm
==========================================================================================
~~~~
*/
optimizer_parameters :-
optimizer_get_parameter(m,M),
optimizer_get_parameter(epsilon,Epsilon),
optimizer_get_parameter(past,Past),
optimizer_get_parameter(delta,Delta),
optimizer_get_parameter(max_iterations,Max_Iterations),
optimizer_get_parameter(linesearch,Linesearch),
optimizer_get_parameter(max_linesearch,Max_Linesearch),
optimizer_get_parameter(min_step,Min_Step),
optimizer_get_parameter(max_step,Max_Step),
optimizer_get_parameter(ftol,Ftol),
optimizer_get_parameter(gtol,Gtol),
optimizer_get_parameter(xtol,Xtol),
optimizer_get_parameter(orthantwise_c,Orthantwise_C),
optimizer_get_parameter(orthantwise_start,Orthantwise_Start),
optimizer_get_parameter(orthantwise_end,Orthantwise_End),
optimizer_parameters(t(X,XO,Params)) :-
optimizer_get_parameter(m,M ,Params),
optimizer_get_parameter(epsilon,Epsilon ,Params),
optimizer_get_parameter(past,Past ,Params),
optimizer_get_parameter(delta,Delta ,Params),
optimizer_get_parameter(max_iterations,Max_Iterations ,Params),
optimizer_get_parameter(linesearch,Linesearch ,Params),
optimizer_get_parameter(max_linesearch,Max_Linesearch ,Params),
optimizer_get_parameter(min_step,Min_Step ,Params),
optimizer_get_parameter(max_step,Max_Step ,Params),
optimizer_get_parameter(ftol,Ftol ,Params),
optimizer_get_parameter(gtol,Gtol ,Params),
optimizer_get_parameter(xtol,Xtol ,Params),
optimizer_get_parameter(orthantwise_c,Orthantwise_C ,Params),
optimizer_get_parameter(orthantwise_start,Orthantwise_Start ,Params),
optimizer_get_parameter(orthantwise_end,Orthantwise_End ,Params),
format('/******************************************************************************************~n',[]),
print_param('Name','Value','Description','Type'),
format('******************************************************************************************~n',[]),
print_param(m,M,'The number of corrections to approximate the inverse hessian matrix.',int),
print_param(epsilon,Epsilon,'Epsilon for convergence test.',float),
print_param(past,Past,'Distance for delta-based convergence test.',int),
print_param(delta,Delta,'Delta for convergence test.',float),
print_param(max_iterations,Max_Iterations,'The maximum number of iterations',int),
print_param(linesearch,Linesearch,'The line search algorithm.',int),
print_param(max_linesearch,Max_Linesearch,'The maximum number of trials for the line search.',int),
print_param(min_step,Min_Step,'The minimum step of the line search routine.',float),
print_param(max_step,Max_Step,'The maximum step of the line search.',float),
print_param(ftol,Ftol,'A parameter to control the accuracy of the line search routine.',float),
print_param(gtol,Gtol,'A parameter to control the accuracy of the line search routine.',float),
print_param(xtol,Xtol,'The machine precision for floating-point values.',float),
print_param(orthantwise_c,Orthantwise_C,'Coefficient for the L1 norm of variables',float),
print_param(orthantwise_start,Orthantwise_Start,'Start index for computing the L1 norm of the variables.',int),
print_param(orthantwise_end,Orthantwise_End,'End index for computing the L1 norm of the variables.',int),
format('/******************************************************************************************~n',[] ),
print_param('Name','Value','Description','Type' ,Params),
format('******************************************************************************************~n',[] ),
print_param(m,M,'The number of corrections to approximate the inverse hessian matrix.',int ,Params),
print_param(epsilon,Epsilon,'Epsilon for convergence test.',float ,Params),
print_param(past,Past,'Distance for delta-based convergence test.',int ,Params),
print_param(delta,Delta,'Delta for convergence test.',float ,Params),
print_param(max_iterations,Max_Iterations,'The maximum number of iterations',int ,Params),
print_param(linesearch,Linesearch,'The line search algorithm.',int ,Params),
print_param(max_linesearch,Max_Linesearch,'The maximum number of trials for the line search.',int ,Params),
print_param(min_step,Min_Step,'The minimum step of the line search routine.',float ,Params),
print_param(max_step,Max_Step,'The maximum step of the line search.',float ,Params),
print_param(ftol,Ftol,'A parameter to control the accuracy of the line search routine.',float ,Params),
print_param(gtol,Gtol,'A parameter to control the accuracy of the line search routine.',float ,Params),
print_param(xtol,Xtol,'The machine precision for floating-point values.',float ,Params),
print_param(orthantwise_c,Orthantwise_C,'Coefficient for the L1 norm of variables',float ,Params),
print_param(orthantwise_start,Orthantwise_Start,'Start index for computing the L1 norm of the variables.',int ,Params),
print_param(orthantwise_end,Orthantwise_End,'End index for computing the L1 norm of the variables.',int ,Params),
format('******************************************************************************************/~n',[]),
format(' use optimizer_set_parameter(Name,Value) to change parameters~n',[]),
format(' use optimizer_get_parameter(Name,Value) to see current parameters~n',[]),

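Taken together, the reworked API threads an explicit handle through every call. A minimal end-to-end sketch in the style of the updated examples, assuming evaluate/5 and progress/10 callbacks are defined in the calling module:

~~~~
demo :-
    optimizer_initialize(1, X, Handle),    % Handle = t(N,X,XO,Params)
    X[0] <== 0.5,                          % seed the starting point
    optimizer_run(Handle, BestF, _XO, Status),
    BestX <== X[0],
    optimizer_finalize(Handle),
    format('minimum at f(~f)=~f, status=~w~n', [BestX, BestF, Status]).
~~~~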
View File

@ -493,7 +493,7 @@ int lbfgs(
*
* @param param The pointer to the parameter structure.
*/
void lbfgs_parameter_init(lbfgs_parameter_t *param);
extern void lbfgs_parameter_init(lbfgs_parameter_t *param);
/**
* Allocate an array for variables.
@ -506,7 +506,7 @@ void lbfgs_parameter_init(lbfgs_parameter_t *param);
*
* @param n The number of variables.
*/
lbfgsfloatval_t* lbfgs_malloc(int n);
extern lbfgsfloatval_t* lbfgs_malloc(int n);
/**
* Free an array of variables.
@ -514,7 +514,7 @@ lbfgsfloatval_t* lbfgs_malloc(int n);
* @param x The array of variables allocated by ::lbfgs_malloc
* function.
*/
void lbfgs_free(lbfgsfloatval_t *x);
extern void lbfgs_free(lbfgsfloatval_t *x);
/** @} */

View File

@ -32,13 +32,8 @@
X_API void init_lbfgs_predicates( void ) ;
int optimizer_status=OPTIMIZER_STATUS_NONE; // the internal state
int n; // the size of the parameter vector
lbfgsfloatval_t *x; // pointer to the parameter vector x[0],...,x[n-1]
lbfgsfloatval_t *g; // pointer to the gradient vector g[0],...,g[n-1]
lbfgs_parameter_t param; // the parameters used for lbfgs
YAP_Functor fcall3, fprogress8;
YAP_Functor fevaluate, fprogress, fmodule, ffloats;
YAP_Term tuser;
static lbfgsfloatval_t evaluate(
void *instance,
@ -49,48 +44,61 @@ static lbfgsfloatval_t evaluate(
)
{
YAP_Term call;
YAP_Term a1;
YAP_Term v, a1;
YAP_Bool result;
YAP_Int s1;
YAP_Term t[4];
YAP_Term t[5], t2[2];
t[0] = YAP_MkVarTerm();
t[1] = YAP_MkIntTerm(n);
t[2] = YAP_MkFloatTerm(step);
t[0] = v = YAP_MkVarTerm();
t[1] = YAP_MkIntTerm((YAP_Int)x);
t[1] = YAP_MkApplTerm(ffloats, 1, t+1);
t[2] = YAP_MkIntTerm((YAP_Int)g_tmp);
t[2] = YAP_MkApplTerm(ffloats, 1, t+2);
t[3] = YAP_MkIntTerm(n);
t[4] = YAP_MkFloatTerm(step);
call = YAP_MkApplTerm(fcall3, 4, t);
g=g_tmp;
t2[0] = tuser;
t2[1] = YAP_MkApplTerm(fevaluate, 5, t);
s1 = YAP_InitSlot(call);
optimizer_status=OPTIMIZER_STATUS_CB_EVAL;
result=YAP_CallProlog(call);
optimizer_status=OPTIMIZER_STATUS_RUNNING;
call = YAP_MkApplTerm( fmodule, 2, t2 );
s1 = YAP_InitSlot(v);
//optimizer_status=OPTIMIZER_STATUS_CB_EVAL;
result=YAP_RunGoal(call);
//optimizer_status=OPTIMIZER_STATUS_RUNNING;
if (result==FALSE) {
printf("ERROR: the evaluate call failed in YAP.\n");
// Goal did not succeed
return FALSE;
YAP_ShutdownGoal( TRUE );
return FALSE;
}
call = YAP_GetFromSlot( s1 );
a1 = YAP_GetFromSlot( s1 );
a1 = YAP_ArgOfTerm(1,call);
lbfgsfloatval_t rc;
if (YAP_IsFloatTerm(a1)) {
return (lbfgsfloatval_t) YAP_FloatOfTerm(a1);
rc = (lbfgsfloatval_t) YAP_FloatOfTerm(a1);
} else if (YAP_IsIntTerm(a1)) {
return (lbfgsfloatval_t) YAP_IntOfTerm(a1);
rc = (lbfgsfloatval_t) YAP_IntOfTerm(a1);
} else {
fprintf(stderr, "ERROR: The evaluate call back function did not return a number as first argument.\n");
rc = false;
}
fprintf(stderr, "ERROR: The evaluate call back function did not return a number as first argument.\n");
return 0;
YAP_ShutdownGoal( TRUE );
return rc;
}
static int progress(
void *instance,
const lbfgsfloatval_t *local_x,
const lbfgsfloatval_t *local_g,
const lbfgsfloatval_t fx,
const lbfgsfloatval_t xnorm,
const lbfgsfloatval_t gnorm,
@ -104,33 +112,43 @@ static int progress(
YAP_Bool result;
YAP_Int s1;
YAP_Term t[8];
YAP_Term t[10], t2[2], v;
t[0] = YAP_MkFloatTerm(fx);
t[1] = YAP_MkFloatTerm(xnorm);
t[2] = YAP_MkFloatTerm(gnorm);
t[3] = YAP_MkFloatTerm(step);
t[4] = YAP_MkIntTerm(n);
t[5] = YAP_MkIntTerm(k);
t[6] = YAP_MkIntTerm(ls);
t[7] = YAP_MkVarTerm();
t[1] = YAP_MkIntTerm((YAP_Int)local_x);
t[1] = YAP_MkApplTerm(ffloats, 1, t+1);
t[2] = YAP_MkIntTerm((YAP_Int)local_g);
t[2] = YAP_MkApplTerm(ffloats, 1, t+2);
t[3] = YAP_MkFloatTerm(xnorm);
t[4] = YAP_MkFloatTerm(gnorm);
t[5] = YAP_MkFloatTerm(step);
t[6] = YAP_MkIntTerm(n);
t[7] = YAP_MkIntTerm(k);
t[8] = YAP_MkIntTerm(ls);
t[9] = v = YAP_MkVarTerm();
call = YAP_MkApplTerm( fprogress8, 8, t);
s1 = YAP_InitSlot(call);
t2[0] = tuser;
t2[1] = YAP_MkApplTerm( fprogress, 10, t);
optimizer_status=OPTIMIZER_STATUS_CB_PROGRESS;
result=YAP_CallProlog(call);
optimizer_status=OPTIMIZER_STATUS_RUNNING;
call = YAP_MkApplTerm( fmodule, 2, t2 );
s1 = YAP_InitSlot(v);
call = YAP_GetFromSlot( s1 );
//optimizer_status=OPTIMIZER_STATUS_CB_PROGRESS;
result=YAP_RunGoal(call);
//optimizer_status=OPTIMIZER_STATUS_RUNNING;
YAP_Term o = YAP_GetFromSlot( s1 );
if (result==FALSE) {
printf("ERROR: the progress call failed in YAP.\n");
// Goal did not succeed
return FALSE;
YAP_ShutdownGoal( TRUE );
return -1;
}
if (YAP_IsIntTerm(YAP_ArgOfTerm(8,call))) {
return YAP_IntOfTerm(YAP_ArgOfTerm(8,call));
if (YAP_IsIntTerm(o)) {
int v = YAP_IntOfTerm(o);
//YAP_ShutdownGoal( TRUE );
return (int)v;
}
YAP_ShutdownGoal( TRUE );
@ -138,138 +156,6 @@ static int progress(
return 1;
}
/** @pred optimizer_set_x(+I,+X)
Set the current value for `x[I]`. Only possible when the optimizer is
initialized but not running.
*/
static YAP_Bool set_x_value(void) {
YAP_Term t1=YAP_ARG1;
YAP_Term t2=YAP_ARG2;
int i=0;
if (optimizer_status!=OPTIMIZER_STATUS_INITIALIZED) {
printf("ERROR: set_x_value/2 can be called only when the optimizer is initialized and not running.\n");
return FALSE;
}
if (YAP_IsIntTerm(t1)) {
i=YAP_IntOfTerm(t1);
} else {
return FALSE;
}
if (i<0 || i>=n) {
printf("ERROR: invalid index for set_x_value/2.\n");
return FALSE;
}
if (YAP_IsFloatTerm(t2)) {
x[i]=(lbfgsfloatval_t) YAP_FloatOfTerm(t2);
} else if (YAP_IsIntTerm(t2)) {
x[i]=(lbfgsfloatval_t) YAP_IntOfTerm(t2);
} else {
return false;
}
return TRUE;
}
/** @pred optimizer_get_x(+I,-X)
Get the current value for `x[I]`. Only possible when the optimizer is
initialized or running.
*/
static YAP_Bool get_x_value(void) {
YAP_Term t1=YAP_ARG1;
YAP_Term t2=YAP_ARG2;
int i=0;
if (optimizer_status==OPTIMIZER_STATUS_NONE) {
printf("ERROR: set_x_value/2 can be called only when the optimizer is initialized.\n");
return FALSE;
}
if (YAP_IsIntTerm(t1)) {
i=YAP_IntOfTerm(t1);
} else {
return FALSE;
}
if (i<0 || i>=n) {
printf("ERROR: invalid index for set_x_value/2.\n");
return FALSE;
}
return YAP_Unify(t2,YAP_MkFloatTerm(x[i]));
}
/** @pred optimizer_set_g(+I,+G) Set the current value for `g[I]` (the
partial derivative of _F_ with respect to `x[I]`). Can only be called
from the evaluate call back predicate.
*/
static YAP_Bool set_g_value(void) {
YAP_Term t1=YAP_ARG1;
YAP_Term t2=YAP_ARG2;
int i=0;
if (optimizer_status != OPTIMIZER_STATUS_CB_EVAL) {
printf("ERROR: optimizer_set_g/2 can only be called by the evaluation call back function.\n");
return FALSE;
}
if (YAP_IsIntTerm(t1)) {
i=YAP_IntOfTerm(t1);
} else {
return FALSE;
}
if (i<0 || i>=n) {
return FALSE;
}
if (YAP_IsFloatTerm(t2)) {
g[i]=(lbfgsfloatval_t) YAP_FloatOfTerm(t2);
} else if (YAP_IsIntTerm(t2)) {
g[i]=(lbfgsfloatval_t) YAP_IntOfTerm(t2);
} else {
return FALSE;
}
return TRUE;
}
/** @pred optimizer_get_g(+I,-G)
Get the current value for `g[I]` (the partial derivative of _F_ with respect to `x[I]`). Only possible when the optimizer is
initialized or running.
*/
static YAP_Bool get_g_value(void) {
YAP_Term t1=YAP_ARG1;
YAP_Term t2=YAP_ARG2;
int i=0;
if (optimizer_status != OPTIMIZER_STATUS_RUNNING && optimizer_status != OPTIMIZER_STATUS_CB_EVAL && optimizer_status != OPTIMIZER_STATUS_CB_PROGRESS) {
printf("ERROR: optimizer_get_g/2 can only be called while the optimizer is running.\n");
return FALSE;
}
if (YAP_IsIntTerm(t1)) {
i=YAP_IntOfTerm(t1);
} else {
return FALSE;
}
if (i<0 || i>=n) {
return FALSE;
}
return YAP_Unify(t2,YAP_MkFloatTerm(g[i]));
}
/** @pred optimizer_initialize(+N,+Module,+Evaluate,+Progress)
Create space to optimize a function with _N_ variables (_N_ has to be
integer).
@ -285,7 +171,7 @@ to evaluate the function _F_,
Example
~~~~
optimizer_initialize(1,user,evaluate,progress)
optimizer_initialize(1,user,evaluate,progress,e,g)
~~~~
@ -314,11 +200,8 @@ value will terminate the optimization process.
static YAP_Bool optimizer_initialize(void) {
YAP_Term t1 = YAP_ARG1;
int temp_n=0;
if (optimizer_status!=OPTIMIZER_STATUS_NONE) {
printf("ERROR: Optimizer has already been initialized. Please call optimizer_finalize/0 first.\n");
return FALSE;
}
lbfgsfloatval_t *temp_x, *temp_ox;
lbfgs_parameter_t *temp_p;
if (! YAP_IsIntTerm(t1)) {
@ -331,18 +214,18 @@ static YAP_Bool optimizer_initialize(void) {
return FALSE;
}
x = lbfgs_malloc(temp_n);
temp_n = 16*(temp_n/16+15);
lbfgs_parameter_init((temp_p=(lbfgs_parameter_t *)malloc(sizeof(lbfgs_parameter_t))));
temp_ox = lbfgs_malloc(temp_n);
YAP_Term tox = YAP_MkIntTerm((YAP_Int)temp_ox);
temp_x = lbfgs_malloc(temp_n);
YAP_Term tx = YAP_MkIntTerm((YAP_Int)temp_x);
tx = YAP_MkApplTerm(ffloats, 1, &tx);
tox = YAP_MkApplTerm(ffloats, 1, &tox);
YAP_Term tp = YAP_MkIntTerm((YAP_Int)temp_p);
if (x == NULL) {
printf("ERROR: Failed to allocate a memory block for variables.\n");
return FALSE;
}
n=temp_n;
optimizer_status=OPTIMIZER_STATUS_INITIALIZED;
return TRUE;
return YAP_Unify(YAP_ARG2,tx) && YAP_Unify(YAP_ARG3,tox) && YAP_Unify(YAP_ARG4,tp) ;
}
@ -355,75 +238,54 @@ meaning.
*/
static YAP_Bool optimizer_run(void) {
int ret = 0;
YAP_Term t1 = YAP_ARG1;
int n = YAP_IntOfTerm(YAP_ARG1);
YAP_Term t2 = YAP_ARG2;
YAP_Int s1, s2;
lbfgsfloatval_t fx;
lbfgsfloatval_t * tmp_x=x;
lbfgsfloatval_t *temp_x = ( lbfgsfloatval_t *) YAP_IntOfTerm(YAP_ArgOfTerm(1,YAP_ARG2)),
*temp_ox = ( lbfgsfloatval_t *) YAP_IntOfTerm(YAP_ArgOfTerm(1,YAP_ARG4));
lbfgs_parameter_t *temp_p = (lbfgs_parameter_t * ) YAP_IntOfTerm(YAP_ARG6);
if (optimizer_status == OPTIMIZER_STATUS_NONE) {
printf("ERROR: Memory for parameter vector not initialized, please call optimizer_initialize/1 first.\n");
return FALSE;
}
//optimizer_status = OPTIMIZER_STATUS_RUNNING;
ret = lbfgs(n, temp_x, &fx, evaluate, progress, temp_ox, temp_p);
if (optimizer_status != OPTIMIZER_STATUS_INITIALIZED) {
printf("ERROR: Optimizer is running right now. Please wait till it is finished.\n");
return FALSE;
}
// both arguments have to be variables
if (! YAP_IsVarTerm(t1) || ! YAP_IsVarTerm(t2)) {
return FALSE;
}
s1 = YAP_InitSlot(t1);
s2 = YAP_InitSlot(t2);
optimizer_status = OPTIMIZER_STATUS_RUNNING;
ret = lbfgs(n, x, &fx, evaluate, progress, NULL, &param);
x=tmp_x;
optimizer_status = OPTIMIZER_STATUS_INITIALIZED;
YAP_Unify(YAP_GetFromSlot(s1),YAP_MkFloatTerm(fx));
YAP_Unify(YAP_GetFromSlot(s2),YAP_MkIntTerm(ret));
return TRUE;
return YAP_Unify(YAP_ARG3, YAP_MkFloatTerm(fx)) && YAP_Unify(YAP_MkIntTerm(ret), YAP_ARG5);
}
static YAP_Bool optimizer_finalize( void ) {
if (optimizer_status == OPTIMIZER_STATUS_NONE) {
printf("Error: Optimizer is not initialized.\n");
return FALSE;
}
/* if (optimizer_status == OPTIMIZER_STATUS_NONE) { */
/* printf("Error: Optimizer is not initialized.\n"); */
/* return FALSE; */
/* } */
if (optimizer_status == OPTIMIZER_STATUS_INITIALIZED) {
lbfgs_free(x);
x=NULL;
n=0;
optimizer_status = OPTIMIZER_STATUS_NONE;
/* if (optimizer_status == OPTIMIZER_STATUS_INITIALIZED) { */
lbfgs_free((void *)YAP_IntOfTerm(YAP_ArgOfTerm(1,YAP_ARG1)));
lbfgs_free((void *)YAP_IntOfTerm(YAP_ArgOfTerm(1,YAP_ARG2)));
lbfgs_free((void *)YAP_IntOfTerm(YAP_ARG3));
return TRUE;
}
/* } */
printf("ERROR: Optimizer is running right now. Please wait till it is finished.\n");
return FALSE;
/* printf("ERROR: Optimizer is running right now. Please wait till it is finished.\n"); */
/* return FALSE; */
}
/** @pred optimizer_set_parameter(+Name,+Value)
/** @pred optimizer_set_parameter(+Name,+Value,+Parameters)
Set the parameter Name to Value. Only possible while the optimizer
is not running.
*/
static YAP_Bool optimizer_set_parameter( void ) {
YAP_Term t1 = YAP_ARG1;
YAP_Term t2 = YAP_ARG2;
if (optimizer_status != OPTIMIZER_STATUS_NONE && optimizer_status != OPTIMIZER_STATUS_INITIALIZED){
printf("ERROR: Optimizer is running right now. Please wait till it is finished.\n");
return FALSE;
}
lbfgs_parameter_t *param = (lbfgs_parameter_t *) YAP_IntOfTerm(YAP_ARG3);
/* if (optimizer_status != OPTIMIZER_STATUS_NONE && optimizer_status != OPTIMIZER_STATUS_INITIALIZED){ */
/* printf("ERROR: Optimizer is running right now. Please wait till it is finished.\n"); */
/* return FALSE; */
/* } */
if (! YAP_IsAtomTerm(t1)) {
@ -436,7 +298,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.m = YAP_IntOfTerm(t2);
param->m = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "epsilon") == 0)) {
lbfgsfloatval_t v;
@ -448,12 +310,12 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.epsilon=v;
param->epsilon=v;
} else if ((strcmp(name, "past") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.past = YAP_IntOfTerm(t2);
param->past = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "delta") == 0)) {
lbfgsfloatval_t v;
@ -465,22 +327,22 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.delta=v;
param->delta=v;
} else if ((strcmp(name, "max_iterations") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.max_iterations = YAP_IntOfTerm(t2);
param->max_iterations = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "linesearch") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.linesearch = YAP_IntOfTerm(t2);
param->linesearch = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "max_linesearch") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.max_linesearch = YAP_IntOfTerm(t2);
param->max_linesearch = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "min_step") == 0)) {
lbfgsfloatval_t v;
@ -492,7 +354,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.min_step=v;
param->min_step=v;
} else if ((strcmp(name, "max_step") == 0)) {
lbfgsfloatval_t v;
@ -504,7 +366,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.max_step=v;
param->max_step=v;
} else if ((strcmp(name, "ftol") == 0)) {
lbfgsfloatval_t v;
@ -516,7 +378,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.ftol=v;
param->ftol=v;
} else if ((strcmp(name, "gtol") == 0)) {
lbfgsfloatval_t v;
@ -528,7 +390,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.gtol=v;
param->gtol=v;
} else if ((strcmp(name, "xtol") == 0)) {
lbfgsfloatval_t v;
@ -540,7 +402,7 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.xtol=v;
param->xtol=v;
} else if ((strcmp(name, "orthantwise_c") == 0)) {
lbfgsfloatval_t v;
@ -552,17 +414,17 @@ static YAP_Bool optimizer_set_parameter( void ) {
return FALSE;
}
param.orthantwise_c=v;
param->orthantwise_c=v;
} else if ((strcmp(name, "orthantwise_start") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.orthantwise_start = YAP_IntOfTerm(t2);
param->orthantwise_start = YAP_IntOfTerm(t2);
} else if ((strcmp(name, "orthantwise_end") == 0)) {
if (! YAP_IsIntTerm(t2)) {
return FALSE;
}
param.orthantwise_end = YAP_IntOfTerm(t2);
param->orthantwise_end = YAP_IntOfTerm(t2);
} else {
printf("ERROR: The parameter %s is unknown.\n",name);
return FALSE;
@ -579,6 +441,7 @@ Get the current Value for Name
static YAP_Bool optimizer_get_parameter( void ) {
YAP_Term t1 = YAP_ARG1;
YAP_Term t2 = YAP_ARG2;
lbfgs_parameter_t *param = (lbfgs_parameter_t *) YAP_IntOfTerm(YAP_ARG3);
if (! YAP_IsAtomTerm(t1)) {
return FALSE;
@ -587,35 +450,35 @@ static YAP_Bool optimizer_get_parameter( void ) {
const char* name=YAP_AtomName(YAP_AtomOfTerm(t1));
if ((strcmp(name, "m") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.m));
return YAP_Unify(t2,YAP_MkIntTerm(param->m));
} else if ((strcmp(name, "epsilon") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.epsilon));
return YAP_Unify(t2,YAP_MkFloatTerm(param->epsilon));
} else if ((strcmp(name, "past") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.past));
return YAP_Unify(t2,YAP_MkIntTerm(param->past));
} else if ((strcmp(name, "delta") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.delta));
return YAP_Unify(t2,YAP_MkFloatTerm(param->delta));
} else if ((strcmp(name, "max_iterations") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.max_iterations));
return YAP_Unify(t2,YAP_MkIntTerm(param->max_iterations));
} else if ((strcmp(name, "linesearch") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.linesearch));
return YAP_Unify(t2,YAP_MkIntTerm(param->linesearch));
} else if ((strcmp(name, "max_linesearch") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.max_linesearch));
return YAP_Unify(t2,YAP_MkIntTerm(param->max_linesearch));
} else if ((strcmp(name, "min_step") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.min_step));
return YAP_Unify(t2,YAP_MkFloatTerm(param->min_step));
} else if ((strcmp(name, "max_step") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.max_step));
return YAP_Unify(t2,YAP_MkFloatTerm(param->max_step));
} else if ((strcmp(name, "ftol") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.ftol));
return YAP_Unify(t2,YAP_MkFloatTerm(param->ftol));
} else if ((strcmp(name, "gtol") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.gtol));
return YAP_Unify(t2,YAP_MkFloatTerm(param->gtol));
} else if ((strcmp(name, "xtol") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.xtol));
return YAP_Unify(t2,YAP_MkFloatTerm(param->xtol));
} else if ((strcmp(name, "orthantwise_c") == 0)) {
return YAP_Unify(t2,YAP_MkFloatTerm(param.orthantwise_c));
return YAP_Unify(t2,YAP_MkFloatTerm(param->orthantwise_c));
} else if ((strcmp(name, "orthantwise_start") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.orthantwise_start));
return YAP_Unify(t2,YAP_MkIntTerm(param->orthantwise_start));
} else if ((strcmp(name, "orthantwise_end") == 0)) {
return YAP_Unify(t2,YAP_MkIntTerm(param.orthantwise_end));
return YAP_Unify(t2,YAP_MkIntTerm(param->orthantwise_end));
}
printf("ERROR: The parameter %s is unknown.\n",name);
@ -628,22 +491,20 @@ static YAP_Bool optimizer_get_parameter( void ) {
X_API void init_lbfgs_predicates( void )
{
fcall3 = YAP_MkFunctor(YAP_LookupAtom("$lbfgs_callback_evaluate"), 3);
fprogress8 = YAP_MkFunctor(YAP_LookupAtom("$lbfgs_callback_progress"), 8);
fevaluate = YAP_MkFunctor(YAP_LookupAtom("evaluate"), 5);
fprogress = YAP_MkFunctor(YAP_LookupAtom("progress"), 10);
fmodule = YAP_MkFunctor(YAP_LookupAtom(":"), 2);
ffloats = YAP_MkFunctor(YAP_LookupAtom("floats"), 1);
tuser = YAP_MkAtomTerm(YAP_LookupAtom("user"));
//Initialize the parameters for the L-BFGS optimization.
lbfgs_parameter_init(&param);
// lbfgs_parameter_init(&param);
YAP_UserCPredicate("optimizer_reserve_memory",optimizer_initialize,1);
YAP_UserCPredicate("optimizer_run",optimizer_run,2);
YAP_UserCPredicate("optimizer_free_memory",optimizer_finalize,0);
YAP_UserCPredicate("optimizer_reserve_memory",optimizer_initialize,4);
YAP_UserCPredicate("optimizer_run",optimizer_run,6);
YAP_UserCPredicate("optimizer_free_memory",optimizer_finalize,3);
YAP_UserCPredicate("optimizer_set_x",set_x_value,2);
YAP_UserCPredicate("optimizer_get_x",get_x_value,2);
YAP_UserCPredicate("optimizer_set_g",set_g_value,2);
YAP_UserCPredicate("optimizer_get_g",get_g_value,2);
YAP_UserCPredicate("optimizer_set_parameter",optimizer_set_parameter,2);
YAP_UserCPredicate("optimizer_get_parameter",optimizer_get_parameter,2);
YAP_UserCPredicate("optimizer_set_parameter",optimizer_set_parameter,3);
YAP_UserCPredicate("optimizer_get_parameter",optimizer_get_parameter,3);
}
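For completeness, the callback contract the glue now expects: the evaluate and progress goals are built with the arity-5 and arity-10 functors registered above and called through the module-qualification functor, with x and g passed as floats(Addr) wrappers. A sketch for f(x0) = x0^2, following the updated examples:

~~~~
evaluate(FX, X, G, _N, _Step) :-
    X0 <== X[0],
    FX is X0*X0,
    G0 is 2*X0,
    G[0] <== G0.

% a 0 in the last argument tells the optimizer to keep going
progress(FX, X, _G, XNorm, GNorm, Step, _N, Iter, Ls, 0) :-
    X0 <== X[0],
    format('~d: x0=~4f f=~4f |X|=~4f |G|=~4f step=~4f ls=~d~n',
           [Iter, X0, FX, XNorm, GNorm, Step, Ls]).
~~~~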