From 76971fa724d2157881915e5fa8b5e75fa9d963fc Mon Sep 17 00:00:00 2001 From: Vitor Santos Costa Date: Tue, 10 Feb 2009 23:57:45 +0000 Subject: [PATCH] add ProbLog to YAP distribution --- ProbLog/Makefile.in | 14 + ProbLog/README | 7 + ProbLog/examples/graph.pl | 86 ++ ProbLog/examples/learn_graph.pl | 96 ++ ProbLog/learning.yap | 1147 +++++++++++++++++++++ ProbLog/learning/logger.yap | 311 ++++++ ProbLog/problog.yap | 958 ++++++++++++++++++ ProbLog/problog/flags.yap | 284 ++++++ ProbLog/problog/print.yap | 24 + ProbLog/problog/tptree.yap | 500 +++++++++ ProbLog/simplecudd/Example.c | 281 ++++++ ProbLog/simplecudd/LICENCE | 131 +++ ProbLog/simplecudd/Makefile.in | 34 + ProbLog/simplecudd/ProblogBDD.c | 670 ++++++++++++ ProbLog/simplecudd/SimpleCUDD.pl | 141 +++ ProbLog/simplecudd/general.c | 234 +++++ ProbLog/simplecudd/general.h | 159 +++ ProbLog/simplecudd/simplecudd.c | 1620 ++++++++++++++++++++++++++++++ ProbLog/simplecudd/simplecudd.h | 287 ++++++ 19 files changed, 6984 insertions(+) create mode 100644 ProbLog/Makefile.in create mode 100644 ProbLog/README create mode 100644 ProbLog/examples/graph.pl create mode 100644 ProbLog/examples/learn_graph.pl create mode 100644 ProbLog/learning.yap create mode 100644 ProbLog/learning/logger.yap create mode 100644 ProbLog/problog.yap create mode 100644 ProbLog/problog/flags.yap create mode 100644 ProbLog/problog/print.yap create mode 100644 ProbLog/problog/tptree.yap create mode 100644 ProbLog/simplecudd/Example.c create mode 100644 ProbLog/simplecudd/LICENCE create mode 100644 ProbLog/simplecudd/Makefile.in create mode 100644 ProbLog/simplecudd/ProblogBDD.c create mode 100644 ProbLog/simplecudd/SimpleCUDD.pl create mode 100644 ProbLog/simplecudd/general.c create mode 100644 ProbLog/simplecudd/general.h create mode 100644 ProbLog/simplecudd/simplecudd.c create mode 100644 ProbLog/simplecudd/simplecudd.h diff --git a/ProbLog/Makefile.in b/ProbLog/Makefile.in new file mode 100644 index 000000000..007bf5c25 --- /dev/null +++ b/ProbLog/Makefile.in @@ -0,0 +1,14 @@ +default: + @(cd simplecudd; \ + echo Making simplecudd...; \ + make) + pwd + cp simplecudd/ProblogBDD . + +clean: + @(cd simplecudd; \ + echo Cleaning simplecudd...; \ + make clean; \ + cd ..) + rm -rf ProblogBDD output queries + \ No newline at end of file diff --git a/ProbLog/README b/ProbLog/README new file mode 100644 index 000000000..0b1a06a03 --- /dev/null +++ b/ProbLog/README @@ -0,0 +1,7 @@ +To compile ProbLog call + make +To clean the directory call + make clean + +The make file will recursively call the make file of SimpleCudd and Cudd. +And it will finally copy the binary executable ProblogBDD to the main directory. \ No newline at end of file diff --git a/ProbLog/examples/graph.pl b/ProbLog/examples/graph.pl new file mode 100644 index 000000000..cd3a4dfe8 --- /dev/null +++ b/ProbLog/examples/graph.pl @@ -0,0 +1,86 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% ProbLog program describing a probabilistic graph +% (running example from ProbLog presentations) +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +:- use_module('../problog'). + +%%%% +% background knowledge +%%%% +% definition of acyclic path using list of visited nodes +path(X,Y) :- path(X,Y,[X],_). + +path(X,X,A,A). +path(X,Y,A,R) :- + X\==Y, + edge(X,Z), + absent(Z,A), + path(Z,Y,[Z|A],R). + +% using directed edges in both directions +edge(X,Y) :- dir_edge(Y,X). +edge(X,Y) :- dir_edge(X,Y). + +% checking whether node hasn't been visited before +absent(_,[]). +absent(X,[Y|Z]):-X \= Y, absent(X,Z). 
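+
+% Illustrative helper, not part of the original example: it returns the
+% nodes of one acyclic path in visiting order. path/4 accumulates the
+% visited nodes in reverse, hence the final reverse/2 from library(lists).
+:- use_module(library(lists),[reverse/2]).
+
+path_nodes(From,To,Nodes) :-
+	path(From,To,[From],RevNodes),
+	reverse(RevNodes,Nodes).
+% e.g. ?- path_nodes(1,4,N).  gives N = [1,2,3,4] as one of the answers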
+ +%%%% +% probabilistic facts +%%%% +0.9::dir_edge(1,2). +0.8::dir_edge(2,3). +0.6::dir_edge(3,4). +0.7::dir_edge(1,6). +0.5::dir_edge(2,6). +0.4::dir_edge(6,5). +0.7::dir_edge(5,3). +0.2::dir_edge(5,4). + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% example queries about path(1,4) +% +%%% explanation probability (and facts involved) +% ?- problog_max(path(1,4),Prob,FactsUsed). +% FactsUsed = [dir_edge(1,2),dir_edge(2,3),dir_edge(3,4)], +% Prob = 0.432 ? +% yes +%%% success probability +% ?- problog_exact(path(1,4),Prob,Status). +% 8 proofs +% Prob = 0.53864, +% Status = ok ? +% yes +%%% lower bound using 4 best proofs +% ?- problog_kbest(path(1,4),4,Prob,Status). +% 4 proofs +% Prob = 0.517344, +% Status = ok ? +% yes +%%% approximation using monte carlo, to reach 95%-confidence interval width 0.01 +% ?- problog_montecarlo(path(1,4),0.01,Prob). +% Prob = 0.537525 ? +% yes +%%% upper and lower bound using iterative deepening, final interval width 0.01 +% ?- problog_delta(path(1,4),0.01,Bound_low,Bound_up,Status). +% Bound_low = 0.5354096, +% Bound_up = 0.53864, +% Status = ok ? +% yes +%%% upper and lower bound obtained cutting the sld tree at probability 0.1 for each branch +% ?- problog_threshold(path(1,4),0.1,Bound_low,Bound_up,Status). +% 4 proofs +% Bound_low = 0.517344, +% Bound_up = 0.563728, +% Status = ok ? +% yes +%%% lower bound obtained cutting the sld tree at probability 0.2 for each branch +% ?- problog_low(path(1,4),0.2,Bound_low,Status). +% 1 proofs +% Bound_low = 0.432, +% Status = ok ? +% yes +% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/ProbLog/examples/learn_graph.pl b/ProbLog/examples/learn_graph.pl new file mode 100644 index 000000000..86436d624 --- /dev/null +++ b/ProbLog/examples/learn_graph.pl @@ -0,0 +1,96 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% ProbLog program describing a probabilistic graph +% (running example from ProbLog presentations) +% +% example for parameter learning with LeProbLog +% +% training and test examples are included at the end of the file +% +% query ?- do_learning(20). +% will run 20 iterations of learning with default settings +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +:- use_module('../learning'). + +%%%% +% background knowledge +%%%% +% definition of acyclic path using list of visited nodes +path(X,Y) :- path(X,Y,[X],_). + +path(X,X,A,A). +path(X,Y,A,R) :- + X\==Y, + edge(X,Z), + absent(Z,A), + path(Z,Y,[Z|A],R). + +% using directed edges in both directions +edge(X,Y) :- dir_edge(Y,X). +edge(X,Y) :- dir_edge(X,Y). + +% checking whether node hasn't been visited before +absent(_,[]). +absent(X,[Y|Z]):-X \= Y, absent(X,Z). + +%%%% +% probabilistic facts +% - probability represented by t/1 term means learnable parameter +% - argument of t/1 is real value (used to compare against in evaluation when known), use t(_) if unknown +%%%% +t(0.9)::dir_edge(1,2). +t(0.8)::dir_edge(2,3). +t(0.6)::dir_edge(3,4). +t(0.7)::dir_edge(1,6). +t(0.5)::dir_edge(2,6). +t(0.4)::dir_edge(6,5). +t(0.7)::dir_edge(5,3). +t(0.2)::dir_edge(5,4). + +%%%%%%%%%%%%%% +% training examples of form example(ID,Query,DesiredProbability) +%%%%%%%%%%%%%% + +example(1,path(1,2),0.94). +example(2,path(1,3),0.81). +example(3,path(1,4),0.54). +example(4,path(1,5),0.70). +example(5,path(1,6),0.87). +example(6,path(2,3),0.85). +example(7,path(2,4),0.57). +example(8,path(2,5),0.72). +example(9,path(2,6),0.86). +example(10,path(3,4),0.66). +example(11,path(3,5),0.80). +example(12,path(3,6),0.75). +example(13,path(4,5),0.57). +example(14,path(4,6),0.51). +example(15,path(5,6),0.69). 
+% some examples for learning from proofs: +example(16,(dir_edge(2,3),dir_edge(2,6),dir_edge(6,5),dir_edge(5,4)),0.032). +example(17,(dir_edge(1,6),dir_edge(2,6),dir_edge(2,3),dir_edge(3,4)),0.168). +example(18,(dir_edge(5,3),dir_edge(5,4)),0.14). +example(19,(dir_edge(2,6),dir_edge(6,5)),0.2). +example(20,(dir_edge(1,2),dir_edge(2,3),dir_edge(3,4)),0.432). + +%%%%%%%%%%%%%% +% test examples of form test_example(ID,Query,DesiredProbability) +% note: ID namespace is shared with training example IDs +%%%%%%%%%%%%%% + +test_example(21,path(2,1),0.94). +test_example(22,path(3,1),0.81). +test_example(23,path(4,1),0.54). +test_example(24,path(5,1),0.70). +test_example(25,path(6,1),0.87). +test_example(26,path(3,2),0.85). +test_example(27,path(4,2),0.57). +test_example(28,path(5,2),0.72). +test_example(29,path(6,2),0.86). +test_example(30,path(4,3),0.66). +test_example(31,path(5,3),0.80). +test_example(32,path(6,3),0.75). +test_example(33,path(5,4),0.57). +test_example(34,path(6,4),0.51). +test_example(35,path(6,5),0.69). + diff --git a/ProbLog/learning.yap b/ProbLog/learning.yap new file mode 100644 index 000000000..92de9c0c9 --- /dev/null +++ b/ProbLog/learning.yap @@ -0,0 +1,1147 @@ +%%% -*- Mode: Prolog; -*- + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Parameter Learning for ProbLog +% +% 27.10.2008 +% bernd.gutmann@cs.kuleuven.be +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +:- module(learning,[do_learning/1, + do_learning/2, + set_learning_flag/2, + save_model/1, + problog_help/0, + set_problog_flag/2, + problog_flag/2, + problog_flags/0 + ]). + +% switch on all the checks to reduce bug searching time +:- style_check(all). +:- yap_flag(unknown,error). + +% load modules from the YAP library +:- use_module(library(lists)). +:- use_module(library(random),[random/1]). +:- use_module(library(system),[file_exists/1, + file_property/2, + delete_file/1, + make_directory/1, + shell/1, + shell/2]). + +% load our own modules +:- use_module('learning/logger'). +:- use_module(problog). + +% used to indicate the state of the system +:- dynamic values_correct/0. +:- dynamic learning_initialized/0. +:- dynamic current_iteration/1. +:- dynamic example_count/1. +:- dynamic query_probability_intern/2. +:- dynamic query_gradient_intern/3. +:- dynamic last_mse/1. + +% used to identify queries which have identical proofs +:- dynamic query_is_similar/2. +:- dynamic query_md5/2. + +% used by set_learning_flag +:- dynamic init_method/5. +:- dynamic rebuild_bdds/1. +:- dynamic rebuild_bdds_it/1. +:- dynamic reuse_initialized_bdds/1. +:- dynamic learning_rate/1. +:- dynamic probability_initializer/3. +:- dynamic check_duplicate_bdds/1. +:- dynamic output_directory/1. +:- dynamic query_directory/1. +:- dynamic log_frequency/1. +:- dynamic alpha/1. +:- dynamic sigmoid_slope/1. + + +%========================================================================== +%= You can set some flags and parameters +%= +%= init_method/5 specifies which ProbLog inference mechanism is used +%= to answer queries +%= +%= +%= if rebuild_bdds(true) is set, the bdds are rebuild after +%= each N iterations for rebuild_bdds_it(N) +%= +%= if reuse_initialized_bdds(true) is set, the bdds which are on the +%= harddrive from the previous run of LeProbLog are reused. 
+%= do not use this, when you changed the init method in the meantime +%= +%========================================================================== +set_learning_flag(init_method,(Query,Probability,BDDFile,ProbFile,Call)) :- + retractall(init_method(_,_,_,_,_)), + assert(init_method(Query,Probability,BDDFile,ProbFile,Call)). + +set_learning_flag(rebuild_bdds,Flag) :- + (Flag=true;Flag=false), + retractall(rebuild_bdds(_)), + assert(rebuild_bdds(Flag)). + +set_learning_flag(rebuild_bdds_it,Flag) :- + integer(Flag), + retractall(rebuild_bdds_it(_)), + assert(rebuild_bdds_it(Flag)). + + +set_learning_flag(reuse_initialized_bdds,Flag) :- + (Flag=true;Flag=false), + retractall(reuse_initialized_bdds(_)), + assert(reuse_initialized_bdds(Flag)). + +set_learning_flag(learning_rate,V) :- + (V=examples -> true;(number(V),V>=0)), + retractall(learning_rate(_)), + assert(learning_rate(V)). + +set_learning_flag(probability_initializer,(FactID,Probability,Query)) :- + var(FactID), + var(Probability), + callable(Query), + retractall(probability_initializer(_,_,_)), + assert(probability_initializer(FactID,Probability,Query)). + +set_learning_flag(check_duplicate_bdds,Flag) :- + (Flag=true;Flag=false), + retractall(check_duplicate_bdds(_)), + assert(check_duplicate_bdds(Flag)). + +set_learning_flag(output_directory,Directory) :- + ( + file_exists(Directory) + -> + file_property(Directory,type(directory)); + make_directory(Directory) + ), + + atomic_concat([Directory,'/'],Path), + atomic_concat([Directory,'/log.dat'],Logfile), + + retractall(output_directory(_)), + assert(output_directory(Path)), + logger_set_filename(Logfile), + set_problog_flag(dir,Directory). + +set_learning_flag(query_directory,Directory) :- + ( + file_exists(Directory) + -> + file_property(Directory,type(directory)); + make_directory(Directory) + ), + + atomic_concat([Directory,'/'],Path), + retractall(query_directory(_)), + assert(query_directory(Path)). + +set_learning_flag(log_frequency,Frequency) :- + integer(Frequency), + Frequency>=0, + retractall(log_frequency(_)), + assert(log_frequency(Frequency)). + +set_learning_flag(alpha,Alpha) :- + number(Alpha), + retractall(alpha(_)), + assert(alpha(Alpha)). +set_learning_flag(sigmoid_slope,Slope) :- + number(Slope), + Slope>0, + retractall(sigmoid_slope(_)), + assert(sigmoid_slope(Slope)). + + +%======================================================================== +%= store the facts with the learned probabilities to a file +%= if F is a variable, a filename based on the current iteration is used +%= +%======================================================================== +save_model(F) :- + ( + var(F) + -> + ( + current_iteration(Iteration), + output_directory(Directory), + atomic_concat([Directory,'factprobs_',Iteration,'.pl'],F) + );true + ), + export_facts(F). 
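+
+% Usage sketch (not part of the original file): the defaults set by
+% global_initialize at the end of this file can be overridden per flag
+% before learning starts, e.g. to use k=64 instead of the default k=10 in
+% the k-best BDD construction and a fixed learning rate:
+%
+% ?- set_learning_flag(init_method,(Query,Probability,BDDFile,ProbFile,
+%        problog_kbest_save(Query,64,Probability,_Status,BDDFile,ProbFile))).
+% ?- set_learning_flag(learning_rate,0.05).
+% ?- set_learning_flag(output_directory,'./my_output').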
+ +%======================================================================== +%= store the probabilities for all training and test examples +%= if F is a variable, a filename based on the current iteration is used +%= +%======================================================================== +save_predictions(F) :- + update_values, + + current_iteration(Iteration), + + ( + var(F) + -> + ( + current_iteration(Iteration), + output_directory(Directory), + atomic_concat([Directory,'predictions_',Iteration,'.pl'],F) + );true + ), + + open(F,'append',Handle), + format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n",[]), + format(Handle,"% Iteration, train/test, QueryID, Query, GroundTruth, Prediction %\n",[]), + format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n",[]), + !, + + ( % go over all training examples + current_predicate(user:example/3), + user:example(Query_ID,Query,TrueQueryProb), + query_probability(Query_ID,LearnedQueryProb), + + format(Handle,'ex(~q,train,~q,~q,~10f,~10f).\n', + [Iteration,Query_ID,Query,TrueQueryProb,LearnedQueryProb]), + + fail; % go to next training example + true + ), + + ( % go over all test examples + current_predicate(user:test_example/3), + user:test_example(Query_ID,Query,TrueQueryProb), + query_probability(Query_ID,LearnedQueryProb), + + format(Handle,'ex(~q,test,~q,~q,~10f,~10f).\n', + [Iteration,Query_ID,Query,TrueQueryProb,LearnedQueryProb]), + + fail; % go to next test example + true + ), + format(Handle,'~3n',[]), + close(Handle). + + + +%======================================================================== +%= find out whether some example IDs are used more than once +%= if so, complain and stop +%= +%======================================================================== + +check_examples :- + ( + ( + (current_predicate(user:example/3),user:example(ID,_,_), \+ atomic(ID)) ; + (current_predicate(user:test_example/3),user:test_example(ID,_,_), \+ atomic(ID)) + ) + -> + ( + format(user_error,'The example id of example ~q is not atomic (e.g foo42, 23, bar, ...).~n',[ID]), + throw(error(examples)) + ); true + ), + + ( + ( + (current_predicate(user:example/3),user:example(ID,_,P), (\+ number(P); P>1 ; P<0)); + (current_predicate(user:test_example/3),user:test_example(ID,_,P), (\+ number(P) ; P>1 ; P<0)) + ) + -> + ( + format(user_error,'The example ~q does not have a valid probaility value (~q).~n',[ID,P]), + throw(error(examples)) + ); true + ), + + + ( + ( + ( + current_predicate(user:example/3), + user:example(ID,QueryA,_), + user:example(ID,QueryB,_), + QueryA \= QueryB + ) ; + + ( + current_predicate(user:test_example/3), + user:test_example(ID,QueryA,_), + user:test_example(ID,QueryB,_), + QueryA \= QueryB + ); + + ( + current_predicate(user:example/3), + current_predicate(user:test_example/3), + user:example(ID,QueryA,_), + user:test_example(ID,QueryB,_), + QueryA \= QueryB + ) + ) + -> + ( + format(user_error,'The example id ~q is used several times.~n',[ID]), + throw(error(examples)) + ); true + ). 
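+
+% For illustration (hypothetical data): check_examples/0 throws
+% error(examples) for any of the following, since IDs must be atomic,
+% probabilities must lie in [0,1], and an ID may name only one query:
+%
+%   example(f(1),path(1,2),0.5).     % non-atomic ID
+%   example(1,path(1,2),1.3).        % probability outside [0,1]
+%   example(2,path(1,2),0.5).
+%   test_example(2,path(2,1),0.5).   % ID 2 reused for a different query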
+ + + + + + +%======================================================================== +%= initialize everything and perform Iterations times gradient descent +%= can be called several times +%= if it is called with an epsilon parameter, it stops when the change +%= in the MSE is smaller than epsilon +%======================================================================== + +do_learning(Iterations) :- + integer(Iterations), + + ( + current_predicate(user:example/3) + -> + true; + format(user_error,'~n~nWarning: No training examples specified !!!~n~n',[]) + ), + + do_learning_intern(Iterations,-1). + +do_learning(Iterations,Epsilon) :- + integer(Iterations), + float(Epsilon), + + Iterations>0, + Epsilon>0.0, + + ( + current_predicate(user:example/3) + -> + true; + format(user_error,'~n~nWarning: No training examples specified !!!~n~n',[]) + ), + + do_learning_intern(Iterations,Epsilon). + + + + + + + +do_learning_intern(Iterations,Epsilon) :- + ( + Iterations=0 + -> + true; + ( + Iterations>0, + + % nothing will happen, if we're already initialized + init_learning, + current_iteration(OldIteration), + !, + retractall(current_iteration(_)), + !, + CurrentIteration is OldIteration+1, + assert(current_iteration(CurrentIteration)), + EndIteration is OldIteration+Iterations, + + format(' Iteration ~d of ~d~n',[CurrentIteration,EndIteration]), + logger_set_variable(iteration,CurrentIteration), + + logger_start_timer(duration), + gradient_descent, + + ( + (rebuild_bdds(true),rebuild_bdds_it(BDDFreq),0 =:= CurrentIteration mod BDDFreq) + -> + ( + once(delete_all_queries), + once(init_queries) + ); true + ), + + + mse_trainingset, + mse_testset, + + ( + last_mse(Last_MSE) + -> + ( + retractall(last_mse(_)), + logger_get_variable(mse_trainingset,Current_MSE), + assert(last_mse(Current_MSE)), + !, + MSE_Diff is abs(Last_MSE-Current_MSE) + ); ( + logger_get_variable(mse_trainingset,Current_MSE), + assert(last_mse(Current_MSE)), + MSE_Diff is Epsilon+1 + ) + ), + + !, + logger_stop_timer(duration), + once(ground_truth_difference), + + logger_write_data, + + + log_frequency(Log_Frequency), + + ( + ( Log_Frequency=0; 0 =:= CurrentIteration mod Log_Frequency) + -> + ( + save_predictions(_X), + save_model(_Y) + ); + true + ), + RemainingIterations is Iterations-1, + + ( + MSE_Diff>Epsilon + -> + do_learning_intern(RemainingIterations,Epsilon); + true + ) + ) + ). 
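+
+% Usage sketch: with a file such as examples/learn_graph.pl loaded,
+%
+% ?- do_learning(20).           % 20 iterations of gradient descent
+% ?- do_learning(100,0.00001).  % at most 100 iterations, stopping early once
+%                               % the change in training-set MSE is below 1e-5
+%
+% Each iteration appends one line to log.dat in the output directory and,
+% depending on the log_frequency flag, dumps the current fact probabilities
+% and predictions via save_model/1 and save_predictions/1.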
+ + + +%======================================================================== +%= find proofs and build bdds for all training and test examples +%= +%= +%======================================================================== + +init_learning :- + ( + learning_initialized + -> + true; + ( + + check_examples, + + + format('Delete previous logs (if existing) from output directory~2n',[]), + empty_output_directory, + + format('Initializing everything~n',[]), + + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % Delete the BDDs from the previous run if they should + % not be reused + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + + ( + (reuse_initialized_bdds(false);rebuild_bdds(true)) + -> + delete_all_queries; + true + ), + + + logger_write_header, + logger_start_timer(duration), + logger_set_variable(iteration,0), + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start count examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + bb_put(training_examples,0), + ( % go over all training examples + current_predicate(user:example/3), + user:example(_,_,_), + bb_get(training_examples, OldCounter), + NewCounter is OldCounter+1, + bb_put(training_examples,NewCounter), + fail; + true + ), + + bb_put(test_examples,0), + ( % go over all test examples + current_predicate(user:test_example/3), + user:test_example(_,_,_), + bb_get(test_examples, OldCounter), + NewCounter is OldCounter+1, + bb_put(test_examples,NewCounter), + fail; + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop count examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + bb_delete(training_examples,TrainingExampleCount), + bb_delete(test_examples,TestExampleCount), + + assert(example_count(TrainingExampleCount)), + + ( + learning_rate(examples) + -> + set_learning_flag(learning_rate,TrainingExampleCount); + true + ), + learning_rate(Learning_Rate), + format('~q training examples found.~n~q test examples found.~nlearning rate=~f~n~n', + [TrainingExampleCount,TestExampleCount,Learning_Rate]), + format('Generate BDDs for all queries in the training and test set~n',[]), + initialize_fact_probabilities, + init_queries, + + format('All Queries have been generated~n',[]), + + mse_trainingset, + mse_testset, + !, + logger_stop_timer(duration), + + ground_truth_difference, + + logger_write_data, + assert(current_iteration(0)), + assert(learning_initialized), + save_model(_),save_predictions(_) + + ) + ). + +%======================================================================== +%= +%= +%= +%======================================================================== + + + +delete_all_queries :- + query_directory(Directory), + atomic_concat(['rm -f ',Directory,'query_*'],Command), + (shell(Command) -> true; true), + retractall(query_is_similar(_,_)), + retractall(query_md5(_,_)). + +empty_output_directory :- + output_directory(Directory), + atomic_concat(['rm -f ',Directory,'log.dat ', + Directory,'factprobs_*.pl ', + Directory,'predictions_*.pl'],Command), + (shell(Command) -> true; true). 
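+
+% For illustration: with the defaults set by global_initialize,
+% output_directory/1 holds './output/' and query_directory/1 './queries/',
+% so the two predicates above run the shell commands
+%   rm -f ./queries/query_*
+%   rm -f ./output/log.dat ./output/factprobs_*.pl ./output/predictions_*.pl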
+ +%======================================================================== +%= This predicate goes over all training and test examples, +%= calls the inference method of ProbLog and stores the resulting +%= BDDs +%======================================================================== + + +init_queries :- + + ( % go over all training examples + current_predicate(user:example/3), + user:example(ID,Query,Prob), + format('~n~n training example ~q: ~q~n',[ID,Query]), + flush_output(user), + init_one_query(ID,Query), + fail; %go to next training example + + true + ), + + ( % go over all test examples + current_predicate(user:test_example/3), + user:test_example(ID,Query,Prob), + format('~n~n test example ~q: ~q~n',[ID,Query]), + flush_output(user), + init_one_query(ID,Query), + fail; % go to next test example + + true + ). + +init_one_query(QueryID,Query) :- + output_directory(Output_Directory), + query_directory(Query_Directory), + + atomic_concat([Query_Directory,'query_',QueryID],Filename), + atomic_concat([Output_Directory,'input.txt'],Filename2), + atomic_concat([Output_Directory,'tmp_md5'],Filename3), + + ( + file_exists(Filename) + -> + format('Reuse existing BDD ~q~n~n',[Filename]); + ( + init_method(Query,_Prob,Filename,Filename2,InitCall), + once(call(InitCall)), + delete_file(Filename2) + ) + ), + + + ( + check_duplicate_bdds(true) + -> + ( + % calculate the md5sum of the bdd script file + atomic_concat(['cat ',Filename,' | md5sum | sed "s/ .*$/\')./" | sed "s/^/md5(\'/"> ',Filename3],MD5Command), + + (shell(MD5Command,0) -> true; throw(error("Something wrong with calculating the MD5 value"))), + + open(Filename3, read, Handle), + read(Handle,md5(Query_MD5)), + close(Handle), + + delete_file(Filename3), + + % Does another query exists which already has this MD5? + ( + query_md5(OtherQueryID,Query_MD5) + -> + % yippie! we can save a lot of work + ( + assert(query_is_similar(QueryID,OtherQueryID)), + format('~q is similar to ~q~2n', [QueryID,OtherQueryID]) + ); assert(query_md5(QueryID,Query_MD5)) + ) + ); + + true + ). + + +%======================================================================== +%= set all unknown fact probabilities to random values +%= +%= +%======================================================================== + +initialize_fact_probabilities :- + ( % go over all tunable facts + tunable_fact(FactID,_), + + probability_initializer(FactID,Probability,Query), + once(call(Query)), + set_fact_probability(FactID,Probability), + + + fail; % go to next tunable fact + + true + ). + +random_probability(_FactID,Probability) :- + % use probs around 0.5 to not confuse k-best search + random(Random), + Probability is 0.5+(Random-0.5)/100. + + + + + +%======================================================================== +%= updates all values of query_probability/2 and query_gradient/3 +%= should be called always before these predicates are accessed +%= if the old values are still valid, nothing happens +%======================================================================== + +update_values :- + values_correct, + !. 
+ +update_values :- + \+ values_correct, + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % delete old values + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + once(retractall(query_probability_intern(_,_))), + once(retractall(query_gradient_intern(_,_,_))), + + + output_directory(Directory), + atomic_concat(Directory,'input.txt',Input_Filename), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start write current probabilities to file + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + open(Input_Filename,'write',Handle), + + ( % go over all tunable facts + get_fact_probability(ID,Prob), + inv_sigmoid(Prob,Value), + format(Handle,'@x~q~n~10f~n',[ID,Value]), + + fail; % go to next tunable fact + true + ), + + close(Handle), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop write current probabilities to file + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start update values for all training examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + ( % go over all training examples + current_predicate(user:example/3), + user:example(QueryID,_Query,_QueryProb), + once(call_bdd_tool(QueryID,'.')), + fail; % go to next training example + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop update values for all training examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start update values for all test examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + ( % go over all training examples + current_predicate(user:test_example/3), + user:test_example(QueryID,_Query,_QueryProb), + once(call_bdd_tool(QueryID,'+')), + fail; % go to next training example + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop update values for all test examples + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + nl, + + delete_file(Input_Filename), + assert(values_correct). + + +%======================================================================== +%= +%= +%= +%======================================================================== + + +call_bdd_tool(QueryID,Symbol) :- + output_directory(Output_Directory), + query_directory(Query_Directory), + ( + query_is_similar(QueryID,_) + -> + % we don't have to evaluate the BDD + write('#'); + ( + sigmoid_slope(Slope), + problog_dir(PD), + atomic_concat([PD, + '/ProblogBDD -i "', + Output_Directory, + 'input.txt', + '" -l "', + Query_Directory, + 'query_', + QueryID, + '" -m g -id ', + QueryID, + ' -sl ',Slope, + ' > "', + Output_Directory, + 'values.pl"'],Command), + shell(Command,Error), + ( + Error = 2 + -> + throw(error('SimpleCUDD has been interrupted.')); + true + ), + ( + Error \= 0 + -> + throw(bdd_error(QueryID,Error)); + true + ), + atomic_concat([Output_Directory,'values.pl'],Values_Filename), + once(my_load(Values_Filename)), + delete_file(Values_Filename), + write(Symbol) + ) + ), + flush_output(user). + + +%======================================================================== +%= +%= +%= +%======================================================================== + +my_load(File) :- + see(File), + read(X), + my_load_intern(X), + seen. +my_load_intern(end_of_file) :- + !. 
+my_load_intern(query_probability(QueryID,Prob)) :- + !, + assert(query_probability_intern(QueryID,Prob)), + read(X2), + my_load_intern(X2). +my_load_intern(query_gradient(QueryID,XFactID,Value)) :- + !, + atomic_concat(x,StringFactID,XFactID), + atom_number(StringFactID,FactID), + assert(query_gradient_intern(QueryID,FactID,Value)), + read(X2), + my_load_intern(X2). +my_load_intern(X) :- + format(user_error,'Unknown atom ~q in results file.~n',[X]), + read(X2), + my_load_intern(X2). + + +%======================================================================== +%= +%= +%= +%======================================================================== +query_probability(QueryID,Prob) :- + ( + query_probability_intern(QueryID,Prob) + -> + true; + ( + query_is_similar(QueryID,OtherQueryID), + query_probability_intern(OtherQueryID,Prob) + ) + ). +query_gradient(QueryID,Fact,Value) :- + ( + query_gradient_intern(QueryID,Fact,Value) + -> + true; + ( + query_is_similar(QueryID,OtherQueryID), + query_gradient_intern(OtherQueryID,Fact,Value) + ) + ). + +%======================================================================== +%= +%= +%= +%======================================================================== + + +ground_truth_difference :- + findall(Diff,(tunable_fact(FactID,GroundTruth), + \+ var(GroundTruth), + get_fact_probability(FactID,Prob), + Diff is abs(GroundTruth-Prob)),AllDiffs), + + % if no ground truth was specified for facts + % set everything to zero + ( + AllDiffs=[] + -> + ( + MinDiff=0.0, + MaxDiff=0.0, + DiffMean=0.0 + ) ; ( + length(AllDiffs,Len), + sum_list(AllDiffs,AllDiffsSum), + min_list(AllDiffs,MinDiff), + max_list(AllDiffs,MaxDiff), + + DiffMean is AllDiffsSum/Len + ) + ), + + logger_set_variable(ground_truth_diff,DiffMean), + logger_set_variable(ground_truth_mindiff,MinDiff), + logger_set_variable(ground_truth_maxdiff,MaxDiff). + +%======================================================================== +%= Calculates the mse of training and test data +%= +%= -Float +%======================================================================== + +% calculate the mse of the training data +mse_trainingset :- + ( + current_predicate(user:example/3) + -> + ( + update_values, + findall(SquaredError, + (user:example(QueryID,_Query,QueryProb), + query_probability(QueryID,CurrentProb), + SquaredError is (CurrentProb-QueryProb)**2), + AllSquaredErrors), + + length(AllSquaredErrors,Length), + sum_list(AllSquaredErrors,SumAllSquaredErrors), + min_list(AllSquaredErrors,MinError), + max_list(AllSquaredErrors,MaxError), + MSE is SumAllSquaredErrors/Length, + + logger_set_variable(mse_trainingset,MSE), + logger_set_variable(mse_min_trainingset,MinError), + logger_set_variable(mse_max_trainingset,MaxError) + ); true + ). + + + +mse_testset :- + ( + current_predicate(user:test_example/3) + -> + ( + update_values, + findall(SquaredError, + (user:test_example(QueryID,_Query,QueryProb), + query_probability(QueryID,CurrentProb), + SquaredError is (CurrentProb-QueryProb)**2), + AllSquaredErrors), + + length(AllSquaredErrors,Length), + sum_list(AllSquaredErrors,SumAllSquaredErrors), + min_list(AllSquaredErrors,MinError), + max_list(AllSquaredErrors,MaxError), + MSE is SumAllSquaredErrors/Length, + + logger_set_variable(mse_testset,MSE), + logger_set_variable(mse_min_testset,MinError), + logger_set_variable(mse_max_testset,MaxError) + ); true + ). 
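+
+% Worked example (hypothetical numbers): with two training examples whose
+% desired probabilities are 0.8 and 0.3 and whose current BDD probabilities
+% are 0.7 and 0.5, the logged values are
+%   mse_trainingset     = ((0.8-0.7)^2 + (0.3-0.5)^2) / 2 = 0.025
+%   mse_min_trainingset = 0.01
+%   mse_max_trainingset = 0.04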
+ + + + +%======================================================================== +%= Calculates the sigmoid function respectivly the inverse of it +%= warning: applying inv_sigmoid to 0.0 or 1.0 will yield +/-inf +%= +%= +Float, -Float +%======================================================================== + +sigmoid(T,Sig) :- + sigmoid_slope(Slope), + Sig is 1/(1+exp(-T*Slope)). + +inv_sigmoid(T,InvSig) :- + sigmoid_slope(Slope), + InvSig is -log(1/T-1)/Slope. + + +%======================================================================== +%= this functions truncates probabilities too close to 1.0 or 0.0 +%= the reason is, applying the inverse sigmoid function would yield +/- inf +%= for such extreme values +%= +%= +Float, -Float +%======================================================================== + +secure_probability(Prob,Prob_Secure) :- + TMP is max(0.00001,Prob), + Prob_Secure is min(0.99999,TMP). + + + +%======================================================================== +%= Perform one iteration of gradient descent +%= +%= assumes that everything is initialized, if the current values +%= of query_probability/2 and query_gradient/3 are not up to date +%= they will be recalculated +%= finally, the values_correct/0 is retracted to signal that the +%= probabilities of the examples have to be recalculated +%======================================================================== + + +gradient_descent :- + update_values, + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start set gradient to zero + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + ( % go over all tunable facts + + tunable_fact(FactID,_), + bb_put(FactID,0.0), + fail; % go to next tunable fact + + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop gradient to zero + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start calculate gradient + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + alpha(Alpha), + example_count(ExampleCount), + ( % go over all training examples + current_predicate(user:example/3), + user:example(QueryID,_Query,QueryProb), + query_probability(QueryID,BDDProb), + ( + QueryProb=:=0.0 + -> + Y2=Alpha; + Y2=1.0 + ), + Y is Y2*2/ExampleCount * (BDDProb-QueryProb), + + + ( % go over all tunable facts + + tunable_fact(FactID,_), + ( + query_gradient(QueryID,FactID,GradValue) + -> + true; + GradValue=0.0 + ), + bb_get(FactID,OldValue), + NewValue is OldValue + Y*GradValue, + bb_put(FactID,NewValue), + fail; % go to next fact + + true + ), + fail; % go to next training example + + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop calculate gradient + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start statistics on gradient + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + findall(V,(tunable_fact(FactID,_),bb_get(FactID,V)),GradientValues), + + sum_list(GradientValues,GradSum), + max_list(GradientValues,GradMax), + min_list(GradientValues,GradMin), + length(GradientValues,GradLength), + GradMean is GradSum/GradLength, + + logger_set_variable(gradient_mean,GradMean), + logger_set_variable(gradient_min,GradMin), + logger_set_variable(gradient_max,GradMax), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop statistics on gradient + 
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % start add gradient to current probabilities + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + learning_rate(LearningRate), + + ( % go over all tunable facts + + tunable_fact(FactID,_), + get_fact_probability(FactID,OldProbability), + bb_delete(FactID,GradValue), + + inv_sigmoid(OldProbability,OldValue), + NewValue is OldValue -LearningRate*GradValue, + sigmoid(NewValue,NewProbability), + + % Prevent "inf" by using values too close to 1.0 + secure_probability(NewProbability,NewProbabilityS), + set_fact_probability(FactID,NewProbabilityS), + + fail; % go to next tunable fact + + true + ), + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + % stop add gradient to current probabilities + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + !, + + % we're done, mark old values as incorrect + retractall(values_correct). + + +%======================================================================== +%= initialize the logger module and set the flags for learning +%= +%======================================================================== + + +global_initialize :- + set_learning_flag(output_directory,'./output'), + set_learning_flag(query_directory,'./queries'), + set_learning_flag(log_frequency,5), + set_learning_flag(rebuild_bdds,false), + set_learning_flag(rebuild_bdds_it,1), + set_learning_flag(reuse_initialized_bdds,false), + set_learning_flag(learning_rate,examples), + set_learning_flag(check_duplicate_bdds,true), + set_learning_flag(probability_initializer,(FactID,P,random_probability(FactID,P))), + set_learning_flag(alpha,1.0), + set_learning_flag(sigmoid_slope,1.0), % 1.0 gives standard sigmoid + set_learning_flag(init_method,(Query,Probability,BDDFile,ProbFile, + problog_kbest_save(Query,10,Probability,_Status,BDDFile,ProbFile))), + + logger_define_variable(iteration, int), + logger_define_variable(duration,time), + logger_define_variable(mse_trainingset,float), + logger_define_variable(mse_min_trainingset,float), + logger_define_variable(mse_max_trainingset,float), + logger_define_variable(mse_testset,float), + logger_define_variable(mse_min_testset,float), + logger_define_variable(mse_max_testset,float), + logger_define_variable(gradient_mean,float), + logger_define_variable(gradient_min,float), + logger_define_variable(gradient_max,float), + logger_define_variable(ground_truth_diff,float), + logger_define_variable(ground_truth_mindiff,float), + logger_define_variable(ground_truth_maxdiff,float). + +%======================================================================== +%= +%= +%======================================================================== + +:- initialization(global_initialize). diff --git a/ProbLog/learning/logger.yap b/ProbLog/learning/logger.yap new file mode 100644 index 000000000..ecd97c948 --- /dev/null +++ b/ProbLog/learning/logger.yap @@ -0,0 +1,311 @@ +%%% -*- Mode: Prolog; -*- + + +:- module(logger,[logger_define_variable/2, + logger_define_variables/2, + logger_set_filename/1, + logger_set_delimiter/1, + logger_set_variable/2, + logger_set_variable_again/2, + logger_get_variable/2, + logger_start_timer/1, + logger_stop_timer/1, + logger_write_data/0, + logger_write_header/0]). + +:- use_module(library(system),[datime/1,mktime/2]). +:- use_module(library(lists),[append/3,member/2]). + +:- yap_flag(unknown,error). +:- style_check(single_var). + +:- bb_put(logger_filename,'out.dat'). 
+:- bb_put(logger_delimiter,';'). +:- bb_put(logger_variables,[]). + + +%======================================================================== +%= Defines a new variable, possible types are: int, float and time +%= +%= +Name, +Type +%======================================================================== + +logger_define_variable(Name,int) :- + !, + is_variable_already_defined(Name), + bb_delete(logger_variables,OldVariables), + append(OldVariables,[(Name,int)],NewVariables), + bb_put(logger_variables,NewVariables), + atom_concat(logger_data_,Name,Key), + bb_put(Key,null). +logger_define_variable(Name,float) :- + !, + is_variable_already_defined(Name), + bb_delete(logger_variables,OldVariables), + append(OldVariables,[(Name,float)],NewVariables), + bb_put(logger_variables,NewVariables), + atom_concat(logger_data_,Name,Key), + bb_put(Key,null). +logger_define_variable(Name,time) :- + !, + is_variable_already_defined(Name), + bb_delete(logger_variables,OldVariables), + append(OldVariables,[(Name,time)],NewVariables), + bb_put(logger_variables,NewVariables), + atom_concat(logger_data_,Name,Key), + atom_concat(logger_start_time_,Name,Key2), + bb_put(Key,null), + bb_put(Key2,null). +logger_define_variable(Name,Unknown) :- + is_variable_already_defined(Name), + write('logger_define_variable, unknown type '), + write(Unknown), + write(' for variable '), + write(Name), + nl, + fail. + +is_variable_already_defined(Name) :- + bb_get(logger_variables,Variables), + member((Name,_),Variables),!, + write('logger_define_variable, Variable '), + write(Name), + write(' is already defined!\n'), + fail; + true. + +%======================================================================== +%= +%= +%= +ListOfNames, +Type +%======================================================================== + +logger_define_variables([],_). +logger_define_variables([H|T],Type) :- + logger_define_variable(H,Type), + logger_define_variables(T,Type). + +%======================================================================== +%= Set the filename, to which the output should be appended +%= +%= +Name +%======================================================================== + +logger_set_filename(Name) :- + bb_put(logger_filename,Name). + +%======================================================================== +%= Set the delimiter for the fields +%= +%= +Delimiter +%======================================================================== + +logger_set_delimiter(Delimiter) :- + bb_put(logger_delimiter,Delimiter). +%======================================================================== +%= Set the value of the variable name. If the value is already set or +%= if the variable does not exists, an error will be displayed and the +%= Prolog will be halted. +%= +%= +Name, +Value +%======================================================================== + +logger_set_variable(Name,Value) :- + atom_concat(logger_data_,Name,Key), + ( + bb_get(Key,null) + -> + ( + bb_put(Key,Value) + );( + bb_get(Key,_) + -> + ( + write('logger_set_variable, Variable '), + write(Name), + write(' is already set'), + nl, + fail + ) ; ( + write('logger_set_variable, unknown variable '), + write(Name), + nl, + fail + ) + ) + ),!. + +%======================================================================== +%= Set the value of the variable name. If the value is already set or +%= the old value is overwritten. If the variable does not exists, an +%= error will be displayed and the Prolog will be halted. 
+%= +%= +Name, +Value +%======================================================================== + +logger_set_variable_again(Name,Value) :- + atom_concat(logger_data_,Name,Key), + ( + bb_get(Key,_) + -> + ( + bb_put(Key,Value) + );( + write('logger_set_variable, unknown variable '), + write(Name), + nl, + fail + ) + ),!. + + +logger_variable_is_set(Name) :- + atom_concat(logger_data_,Name,Key), + bb_get(Key,X), + X \= null. + +%======================================================================== +%= Get the value of the variable name. If the value is not yet set or +%= if the variable does not exists, an error will be displayed and the +%= Prolog will be halted. +%= +%= +Name, +Value +%======================================================================== + +logger_get_variable(Name,Value) :- + atom_concat(logger_data_,Name,Key), + ( + bb_get(Key,null) + -> + ( + write('logger_get_variable, Variable '), + write(Name), + write(' is not yet set'), + nl, + fail + );( + bb_get(Key,Value) + ; + ( + write('logger_set_variable, unknown variable '), + write(Name), + nl, + fail + ) + ) + ),!. +%======================================================================== +%= +%= +%= +Name +%======================================================================== + +logger_start_timer(Name) :- + atom_concat(logger_start_time_,Name,Key), + ( + bb_get(Key,null) + -> + ( + statistics(walltime,[StartTime,_]), + bb_put(Key,StartTime) + );( + bb_get(Key,_) + -> + ( + write('logger_start_timer, timer '), + write(Name), + write(' is already started'), + nl, + fail + );( + write('logger_start_timer, timer '), + write(Name), + write(' is not defined'), + nl, + fail + ) + ) + ),!. + + +logger_stop_timer(Name) :- + atom_concat(logger_start_time_,Name,Key), + + bb_delete(Key,StartTime), + statistics(walltime,[StopTime,_]), + + bb_put(Key,null), + + Duration is StopTime-StartTime, + + ( + logger_variable_is_set(Name) + -> + ( + logger_get_variable(Name,OldDuration), + NewDuration is Duration+OldDuration, + logger_set_variable_again(Name,NewDuration) + ); logger_set_variable(Name,Duration) + ),!. + +%======================================================================== +%= write a new line to the log file, which contains all the +%= values of the variables. afterwards, reset all variables to null. +%= +%======================================================================== + +logger_write_data :- + bb_get(logger_filename,FName), + bb_get(logger_variables,Variables), + open(FName,'append',Handle), + logger_write_data_intern(Variables,Handle), + close(Handle), + + % reset variables + findall(_,(member((Name,_),Variables),atom_concat(logger_data_,Name,Key),bb_put(Key,null)),_), + findall(_,(member((Name,time),Variables),atom_concat(logger_start_time_,Name,Key2),bb_put(Key2,null)),_). + +logger_write_data_intern([],_). +logger_write_data_intern([(Name,_Type)],Handle) :- + variablevalue_with_nullcheck(Name,Value), + write(Handle,Value), + write(Handle,'\n'). +logger_write_data_intern([(Name,_Type),Next|T],Handle) :- + variablevalue_with_nullcheck(Name,Value), + bb_get(logger_delimiter,D), + write(Handle,Value), + write(Handle,D), + logger_write_data_intern([Next|T],Handle). + +variablevalue_with_nullcheck(Name,Result) :- + atom_concat(logger_data_,Name,Key), + bb_get(Key,Value), + ( + Value=null + -> + Result = '' ; + Result=Value + ). 
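+
+% Usage sketch for this module on its own (variable names made up):
+%
+% ?- logger_define_variable(iteration,int),
+%    logger_define_variable(duration,time).
+% ?- logger_write_header.
+% ?- logger_set_variable(iteration,1),
+%    logger_start_timer(duration),
+%    logger_stop_timer(duration),
+%    logger_write_data.
+%
+% This appends the header line '# iteration;duration' and then one data row
+% per call of logger_write_data to the file set via logger_set_filename/1
+% (default 'out.dat'), using the delimiter set via logger_set_delimiter/1.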
+%======================================================================== +%= +%= +%= +%======================================================================== + +logger_write_header :- + bb_get(logger_filename,FName), + bb_get(logger_variables,Variables), + open(FName,'append',Handle), + write(Handle,'# '), + logger_write_header_intern(Variables,Handle), + write(Handle,'\n'), + close(Handle). + +logger_write_header_intern([],_). +logger_write_header_intern([(Name,_Type)],Handle) :- + write(Handle,Name). +logger_write_header_intern([(Name,_Type),Next|T],Handle) :- + bb_get(logger_delimiter,D), + write(Handle,Name), + write(Handle,D), + logger_write_header_intern([Next|T],Handle). diff --git a/ProbLog/problog.yap b/ProbLog/problog.yap new file mode 100644 index 000000000..fc79568d1 --- /dev/null +++ b/ProbLog/problog.yap @@ -0,0 +1,958 @@ +%%% -*- Mode: Prolog; -*- +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% ProbLog inference +% +% assumes probabilistic facts as Prob::Fact and clauses in normal Prolog format +% +% provides following inference modes (16/12/2008): +% - approximation with interval width Delta (IJCAI07): problog_delta(+Query,+Delta,-Low,-High,-Status) +% - bounds based on single probability threshold: problog_threshold(+Query,+Threshold,-Low,-High,-Status) +% - as above, but lower bound only: problog_low(+Query,+Threshold,-Low,-Status) +% - lower bound based on K most likely proofs: problog_kbest(+Query,+K,-Low,-Status) +% - explanation probability (ECML07): problog_max(+Query,-Prob,-FactsUsed) +% - exact probability: problog_exact(+Query,-Prob,-Status) +% - sampling: problog_montecarlo(+Query,+Delta,-Prob) +% +% +% angelika.kimmig@cs.kuleuven.be +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +:- module(problog, [problog_delta/5, + problog_threshold/5, + problog_low/4, + problog_kbest/4, + problog_kbest_save/6, + problog_max/3, + problog_exact/3, + problog_montecarlo/3, + get_fact_probability/2, + set_fact_probability/2, + get_fact/2, + tunable_fact/2, + export_facts/1, + problog_help/0, + problog_dir/1, + set_problog_flag/2, + problog_flag/2, + problog_flags/0]). + +:- style_check(all). +:- yap_flag(unknown,error). + +% problog related modules +:- use_module('problog/flags',[set_problog_flag/2, + problog_flag/2, + problog_flags/0]). + +:- use_module('problog/print', [print_sep_line/0, + print_inference/2]). + +:- use_module('problog/tptree',[init_ptree/1, + delete_ptree/1, + insert_ptree/2, + count_ptree/2, + prune_check_ptree/2, + merge_ptree/3, + bdd_ptree_map/4, + bdd_ptree/3]). + +% general yap modules +:- ensure_loaded(library(lists)). +:- ensure_loaded(library(terms)). +:- ensure_loaded(library(random)). +:- ensure_loaded(library(system)). +:- ensure_loaded(library(rbtrees)). + +% op attaching probabilities to facts +:- op( 550, yfx, :: ). + +%%%%%%%%%%%%%%%%%%%%%%%% +% control predicates on various levels +%%%%%%%%%%%%%%%%%%%%%%%% + +% global over all inference methods, internal use only +:- dynamic problog_predicate/2. +% global over all inference methods, exported +:- dynamic tunable_fact/2. +:- dynamic problog_dir/1. +% global, manipulated via problog_control/2 +:- dynamic up/0. +:- dynamic limit/0. +:- dynamic mc/0. +:- dynamic remember/0. +% local to problog_delta +:- dynamic low/2. +:- dynamic up/2. +:- dynamic stopDiff/1. +% local to problog_kbest +:- dynamic current_kbest/3. +% local to problog_max +:- dynamic max_probability/1. +:- dynamic max_proof/1. +% local to problog_montecarlo +:- dynamic mc_prob/1. 
+ +% directory where ProblogBDD executable is located +% automatically set during loading -- assumes it is in same place as this file (problog.yap) +:- getcwd(PD),retractall(problog_dir(_)),assert(problog_dir(PD)). + +%%%%%%%%%%%%%%%%%%%%%%%% +% help +%%%%%%%%%%%%%%%%%%%%%%%% + +problog_help :- + format('~2nProbLog inference currently offers the following inference methods:~n',[]), + show_inference, + format('~2nThe following global parameters are available:~n',[]), + problog_flags, + print_sep_line, + format('~n use problog_help/0 to display this information~n',[]), + format('~n use problog_flags/0 to display current parameter values~2n',[]), + print_sep_line, + nl, + flush_output. + +show_inference :- + format('~n',[]), + print_sep_line, + print_inference(call,description), + print_sep_line, + print_inference('problog_delta(+Query,+Delta,-Low,-High,-Status)','approximation with interval width Delta (IJCAI07)'), + print_inference('problog_threshold(+Query,+Threshold,-Low,-High,-Status)','bounds based on single probability threshold'), + print_inference('problog_low(+Query,+Threshold,-Low,-Status)','lower bound based on single probability threshold'), + print_inference('problog_kbest(+Query,+K,-Low,-Status)','lower bound based on K most likely proofs'), + print_inference('problog_max(+Query,-Prob,-FactsUsed)','explanation probability (ECML07)'), + print_inference('problog_exact(+Query,-Prob,-Status)','exact probability'), + print_inference('problog_montecarlo(+Query,+Delta,-Prob)','sampling with 95\%-confidence-interval-width Delta'), + print_sep_line. + +%%%%%%%%%%%%%%%%%%%%%%%% +% initialization of global parameters +%%%%%%%%%%%%%%%%%%%%%%%% + +init_global_params :- + set_problog_flag(bdd_time,60), + set_problog_flag(first_threshold,0.1), + L is 10**(-30), + set_problog_flag(last_threshold,L), + set_problog_flag(id_stepsize,0.5), + set_problog_flag(prunecheck,off), + set_problog_flag(maxsteps,1000), + set_problog_flag(mc_batchsize,1000), + set_problog_flag(mc_logfile,'log.txt'), + set_problog_flag(bdd_file,example_bdd), + set_problog_flag(dir,output), + set_problog_flag(save_bdd,false), +% problog_flags, + print_sep_line, + format('~n use problog_help/0 for information~n',[]), + format('~n use problog_flags/0 to display current parameter values~2n',[]), + print_sep_line, + nl, + flush_output. + +% parameter initialization to be called after returning to user's directory: +:- initialization(init_global_params). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% internal control flags +% if on +% - up: collect stopped derivations to build upper bound +% - limit: iterative deepening reached limit -> should go to next level +% - mc: using problog_montecarlo, i.e. proving with current sample instead of full program +% - remember: save BDD files containing script, params and mapping +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +problog_control(on,X) :- + call(X),!. +problog_control(on,X) :- + assert(X). +problog_control(off,X) :- + retractall(X). +problog_control(check,X) :- + call(X). + +:- problog_control(off,up). +:- problog_control(off,mc). +:- problog_control(off,limit). +:- problog_control(off,remember). 
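+
+% For illustration: the control flags are plain dynamic 0-ary predicates, so
+% ?- problog_control(check,up).                          % fails while up is off
+% ?- problog_control(on,up), problog_control(check,up).  % succeeds
+% ?- problog_control(off,up).                            % switches it off again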
+ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% nice user syntax Prob::Fact +% automatic translation to internal hardware access format +% +% probabilities =1 are dropped -> normal Prolog fact +% +% internal fact representation +% - prefixes predicate name with problog_ +% - adds unique ID as first argument +% - adds logarithm of probability as last argument +% - keeps original arguments in between +% +% for each predicate appearing as probabilistic fact, wrapper clause is introduced: +% - head is most general instance of original fact +% - body is corresponding version of internal fact plus call to add_to_proof/2 to update current state during proving +% example: edge(A,B) :- problog_edge(ID,A,B,LogProb), add_to_proof(ID,LogProb). +% +% dynamic predicate problog_predicate(Name,Arity) keeps track of predicates that already have wrapper clause +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +user:term_expansion(_P::( _Goal :- _Body ), _Error) :- + throw(error('we do not support this (yet?)!')). + +user:term_expansion(P::Goal,Goal) :- + P \= t(_), + P =:= 1, + !. + +user:term_expansion(P::Goal, problog:ProbFact) :- + functor(Goal, Name, Arity), + atomic_concat([problog_,Name],ProblogName), + Goal =.. [Name|Args], + append(Args,[LProb],L1), + probclause_id(IDName), + term_variables(Goal,GVars), + (GVars=[] -> ID=IDName; ID=..[IDName|GVars]), + ProbFact =.. [ProblogName,ID|L1], + (P = t(TrueProb) -> + assert(tunable_fact(ID,TrueProb)), + LProb is log(0.5) + ; + LProb is log(P) + ), + problog_predicate(Name, Arity, ProblogName). + +% introduce wrapper clause if predicate seen first time +problog_predicate(Name, Arity, _) :- + problog_predicate(Name, Arity), !. +problog_predicate(Name, Arity, ProblogName) :- + functor(OriginalGoal, Name, Arity), + OriginalGoal =.. [_|Args], + append(Args,[Prob],L1), + ProbFact =.. [ProblogName,ID|L1], + prolog_load_context(module,Mod), + assert((Mod:OriginalGoal :- ProbFact, add_to_proof(ID,Prob))), + assert(problog_predicate(Name, Arity)), + ArityPlus2 is Arity+2, + dynamic(problog:ProblogName/ArityPlus2). + +% generate next global identifier +probclause_id(ID) :- + nb_getval(probclause_counter,ID), !, + C1 is ID+1, + nb_setval(probclause_counter,C1), !. +probclause_id(0) :- + nb_setval(probclause_counter,1). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% access/update the probability of ID's fact +% hardware-access version: naively scan all problog-predicates +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +get_fact_probability(ID,Prob) :- + get_internal_fact(ID,ProblogTerm,_ProblogName,ProblogArity), + arg(ProblogArity,ProblogTerm,Log), + Prob is exp(Log). +set_fact_probability(ID,Prob) :- + get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity), + retract(ProblogTerm), + ProblogTerm =.. [ProblogName|ProblogTermArgs], + nth(ProblogArity,ProblogTermArgs,_,KeepArgs), + NewLogProb is log(Prob), + nth(ProblogArity,NewProblogTermArgs,NewLogProb,KeepArgs), + NewProblogTerm =.. [ProblogName|NewProblogTermArgs], + assert(NewProblogTerm). + +get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity) :- + problog_predicate(Name,Arity), + atomic_concat([problog_,Name],ProblogName), + ProblogArity is Arity+2, + functor(ProblogTerm,ProblogName,ProblogArity), + arg(1,ProblogTerm,ID), + call(ProblogTerm). 
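+
+% Worked example of the expansion described above, assuming the fact below is
+% the first probabilistic fact loaded (so it gets ID 0): the declaration
+%   0.9::dir_edge(1,2).
+% is stored internally as
+%   problog_dir_edge(0,1,2,LogProb).    % LogProb = log(0.9) ~ -0.1054
+% together with the wrapper clause (added on first use of dir_edge/2)
+%   dir_edge(A,B) :- problog_dir_edge(ID,A,B,LP), add_to_proof(ID,LP).
+% get_fact_probability(0,P) then gives P = 0.9 again (up to rounding), by
+% exponentiating the stored log-probability.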
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% writing those facts with learnable parameters to File
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+export_facts(File) :-
+	tell(File),
+	export_facts,
+	flush_output,
+	told.
+export_facts :-
+	tunable_fact(ID,_),
+	once(write_tunable_fact(ID)),
+	fail.
+export_facts.
+
+write_tunable_fact(ID) :-
+	get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity),
+	ProblogTerm =.. [_Functor,ID|Args],
+	atomic_concat('problog_',OutsideFunctor,ProblogName),
+	Last is ProblogArity-1,
+	nth(Last,Args,LogProb,OutsideArgs),
+	OutsideTerm =.. [OutsideFunctor|OutsideArgs],
+	Prob is exp(LogProb),
+	format('~w :: ~q.~n',[Prob,OutsideTerm]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% recover fact for given id
+% list version not exported (yet?)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+get_fact(ID,OutsideTerm) :-
+	get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity),
+	ProblogTerm =.. [_Functor,ID|Args],
+	atomic_concat('problog_',OutsideFunctor,ProblogName),
+	Last is ProblogArity-1,
+	nth(Last,Args,_LogProb,OutsideArgs),
+	OutsideTerm =.. [OutsideFunctor|OutsideArgs].
+
+get_fact_list([],[]).
+get_fact_list([ID|IDs],[Fact|Facts]) :-
+	get_fact(ID,Fact),
+	get_fact_list(IDs,Facts).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% ProbLog inference, core methods
+%
+% state of proving saved in two backtrackable global variables
+% - problog_current_proof holds list of IDs of clauses used
+% - problog_probability holds the sum of their log probabilities
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+% called "inside" probabilistic facts to update current state of proving:
+% if number of steps exceeded, fail
+% if fact used before, succeed and keep status as is
+% if not prunable, calculate probability and
+% if threshold exceeded, add stopped derivation to upper bound and fail
+% else update state and succeed
+add_to_proof(ID,Prob) :-
+	montecarlo_check(ID),
+	b_getval(problog_steps,MaxSteps),
+	b_getval(problog_probability, CurrentP),
+	nb_getval(problog_threshold, CurrentThreshold),
+	b_getval(problog_current_proof, IDs),
+	( MaxSteps =< 0 ->
+	    fail
+	;
+	    ( memberchk(ID, IDs) ->
+		true
+	    ;
+		\+ prune_check([ID|IDs],1),
+		multiply_probabilities(CurrentP, Prob, NProb),
+		( NProb < CurrentThreshold ->
+		    upper_bound([ID|IDs]),
+		    fail
+		;
+		    b_setval(problog_probability, NProb),
+		    b_setval(problog_current_proof, [ID|IDs])
+		)
+	    ),
+	    Steps is MaxSteps-1,
+	    b_setval(problog_steps,Steps)
+	).
+
+% if in monte carlo mode, check array to see if fact can be used
+montecarlo_check(ID) :-
+	(
+	 problog_control(check,mc)
+	->
+	 (
+	  array_element(mc_sample,ID,V),
+	  (
+	   V == 1 -> true
+	  ;
+	   V == 2 -> fail
+	  ;
+	   new_sample(ID)
+	  )
+	 )
+	;
+	 true
+	).
+
+% sample fact, store result (1=true, 2=false) in array, succeed iff true
+new_sample(ID) :-
+	get_fact_probability(ID,Prob),
+	random(R),
+	(R<Prob -> X=1; X=2),
+	update_array(mc_sample,ID,X),
+	X=1.
+
+% store stopped derivation in upper bound trie
+% list always non-empty -> don't need []=true-case for tries
+upper_bound(List) :-
+	problog_control(on,limit),
+	problog_control(check,up),
+	reverse(List,R),
+	(prune_check(R,2) -> true; insert_ptree(R,2)).
+
+multiply_probabilities(CurrentLogP, LogProb, NLogProb) :-
+	NLogProb is CurrentLogP+LogProb.
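+
+% Worked example: proof probabilities are maintained in log space, so a
+% partial proof of probability 0.8 extended with a fact of probability 0.5
+% gets log(0.8)+log(0.5) = log(0.4) ~ -0.916; add_to_proof/2 compares this
+% sum against the log of the threshold installed by init_problog/1 below.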
+ +% this is called by all inference methods before the actual ProbLog goal +% to set up environment for proving +init_problog(Threshold) :- + LT is log(Threshold), + b_setval(problog_probability, 0.0), + b_setval(problog_current_proof, []), + nb_setval(problog_threshold, LT), + problog_flag(maxsteps,MaxS), + b_setval(problog_steps, MaxS), + problog_control(off,limit). + +% idea: proofs that are refinements of known proof can be pruned as they don't add probability mass +% note that current ptree implementation doesn't provide the check as there's no efficient method known so far... +prune_check(Proof,TreeID) :- + problog_flag(prunecheck,on), + prune_check_ptree(Proof,TreeID). + +% to call a ProbLog goal, patch all subgoals with the user's module context +% (as logical part is there, but probabilistic part in problog) +problog_call(Goal) :- + yap_flag(typein_module,Module), + put_module(Goal,Module,ModGoal), + call(ModGoal). + +put_module((Mod:Goal,Rest),Module,(Mod:Goal,Transformed)) :- + !, + put_module(Rest,Module,Transformed). +put_module((Goal,Rest),Module,(Module:Goal,Transformed)) :- + !, + put_module(Rest,Module,Transformed). +put_module((Mod:Goal),_Module,(Mod:Goal)) :- + !. +put_module(Goal,Module,Module:Goal). + +% end of core + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% evaluating a DNF given as trie using BDD +% input: ID of trie to be used +% output: probability and status (to catch potential failures/timeouts from outside) +% +% with internal BDD timeout (set using problog flag bdd_time) +% +% bdd_ptree/3 constructs files for ProblogBDD from the trie +% +% if calling ProblogBDD doesn't exit successfully, status will be timeout +% +% writes number of proofs in trie and BDD time to standard user output +% +% if remember is on, input files for ProblogBDD will be saved +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +eval_dnf(ID,Prob,Status) :- + ((ID = 1, problog_flag(save_bdd,true)) -> problog_control(on,remember); problog_control(off,remember)), + count_ptree(ID,NX), + format(user,'~w proofs~n',[NX]), + problog_flag(dir,DirFlag), + problog_flag(bdd_file,BDDFileFlag), + atomic_concat([DirFlag,BDDFileFlag],BDDFile), + problog_flag(bdd_par_file,BDDParFileFlag), + atomic_concat([DirFlag,BDDParFileFlag],BDDParFile), + (problog_control(check,remember) -> + bdd_ptree_map(ID,BDDFile,BDDParFile,Mapping), + atomic_concat([DirFlag,'save_map'],MapFile), + tell(MapFile), + format('mapping(~q).~n',[Mapping]), + flush_output, + told + ; + bdd_ptree(ID,BDDFile,BDDParFile) + ), + problog_flag(bdd_time,BDDTime), + problog_flag(bdd_result,ResultFileFlag), + atomic_concat([DirFlag,ResultFileFlag],ResultFile), + problog_dir(PD), + atomic_concat([PD,'/ProblogBDD -l ',BDDFile,' -i ',BDDParFile,' -m p -t ', BDDTime,' > ', ResultFile],Command), + statistics(walltime,_), + shell(Command,Return), + ( + Return =\= 0 + -> + Status = timeout + ; + ( + statistics(walltime,[_,E3]), + format(user,'~w ms BDD processing~n',[E3]), + see(ResultFile), + read(probability(Prob)), + seen, + delete_file(ResultFile), + Status = ok + ) + ), + (problog_control(check,remember) -> + atomic_concat([DirFlag,'save_script'],SaveBDDFile), + rename_file(BDDFile,SaveBDDFile), + atomic_concat([DirFlag,'save_params'],SaveBDDParFile), + rename_file(BDDParFile,SaveBDDParFile) + ; + true + ), + problog_control(off,remember). 
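To make the call sequence concrete, here is a hypothetical instantiation of what eval_dnf/3 runs and reads back (directory, file names and timeout are placeholders, not the actual flag defaults):

    % command assembled via atomic_concat/2 and passed to shell/2
    %   /path/to/ProblogBDD -l /tmp/query_bdd -i /tmp/query_bdd_probs -m p -t 60 > /tmp/query_res
    % on success the result file contains a single term that is read back:
    %   probability(0.123456).
    % a non-zero exit status from the shell call is mapped to Status = timeout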
+ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% different inference methods +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% approximate inference: bounds based on single probability threshold +% problog_threshold(+Goal,+Threshold,-LowerBound,-UpperBound,-Status) +% +% use backtracking over problog_call to get all solutions +% +% trie 1 collects proofs, trie 2 collects stopped derivations, trie 3 is used to unit them for the upper bound +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_threshold(Goal, Threshold, _, _, _) :- + problog_control(on,up), + init_problog_threshold(Threshold), + problog_call(Goal), + add_solution, + fail. +problog_threshold(_, _, LP, UP, Status) :- + compute_bounds(LP, UP, Status). + +init_problog_threshold(Threshold) :- + init_ptree(1), + init_ptree(2), + init_problog(Threshold). + +add_solution :- + b_getval(problog_current_proof, IDs), + (IDs == [] -> R = true ; reverse(IDs,R)), + insert_ptree(R,1). + +compute_bounds(LP, UP, Status) :- + eval_dnf(1,LP,StatusLow), + (StatusLow \== ok -> + Status = StatusLow + ; + merge_ptree(1,2,3), + eval_dnf(3,UP,Status)), + delete_ptree(1), + delete_ptree(2), + delete_ptree(3). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% approximate inference: lower bound based on all proofs above probability threshold +% problog_low(+Goal,+Threshold,-LowerBound,-Status) +% +% same as problog_threshold/5, but lower bound only (no stopped derivations stored) +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_low(Goal, Threshold, _, _) :- + problog_control(off,up), + init_problog_low(Threshold), + problog_call(Goal), + add_solution, + fail. +problog_low(_, _, LP, Status) :- + eval_dnf(1,LP,Status), + delete_ptree(1). + +init_problog_low(Threshold) :- + init_ptree(1), + init_problog(Threshold). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% approximate inference: bounds by iterative deepening up to interval width Delta +% problog_delta(+Goal,+Delta,-LowerBound,-UpperBound,-Status) +% +% wraps iterative deepening around problog_threshold, i.e. +% - starts with threshold given by first_threshold flag +% - if Up-Low >= Delta, multiply threshold by factor given in id_stepsize flag and iterate +% (does not use problog_threshold as trie 1 is kept over entire search) +% +% local dynamic predicates low/2, up/2, stopDiff/1 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_delta(Goal, Delta, Low, Up, Status) :- + problog_control(on,up), + problog_flag(first_threshold,InitT), + init_problog_delta(InitT,Delta), + problog_delta_id(Goal,Status), + delete_ptree(1), + delete_ptree(2), + (retract(low(_,Low)) -> true; true), + (retract(up(_,Up)) -> true; true). + + +init_problog_delta(Threshold,Delta) :- + retractall(low(_,_)), + retractall(up(_,_)), + retractall(stopDiff(_)), + init_ptree(1), + init_ptree(2), + assert(low(0,0.0)), + assert(up(0,1.0)), + assert(stopDiff(Delta)), + init_problog(Threshold). + +problog_delta_id(Goal, _) :- + problog_call(Goal), + add_solution, % reused from problog_threshold + fail. 
+problog_delta_id(Goal, Status) :- + evaluateStep(Ans,StatusE), + problog_flag(last_threshold_log,Stop), + nb_getval(problog_threshold,Min), + (StatusE \== ok -> + Status = StatusE + ; + ( + Ans = 1 -> + Status = ok + ; + Min =< Stop -> + Status = stopreached + ; + problog_control(check,limit) -> + problog_control(off,limit), + problog_flag(id_stepsize_log,Step), + New is Min+Step, + nb_setval(problog_threshold,New), + problog_delta_id(Goal, Status) + ; + true + )). + +% call the dnf evaluation where needed +evaluateStep(Ans,Status) :- once(evalStep(Ans,Status)). + +evalStep(Ans,Status) :- + stopDiff(Delta), + count_ptree(1,NProofs), + count_ptree(2,NCands), + format(user,'~w proofs, ~w stopped derivations~n',[NProofs,NCands]), + flush_output(user), + eval_lower(NProofs,Low,StatusLow), + (StatusLow \== ok -> + Status = StatusLow + ; + up(_,OUP), + IntDiff is OUP-Low, + ((IntDiff < Delta; IntDiff =:= 0) -> + Up=OUP, StatusUp = ok + ; + eval_upper(NCands,Up,StatusUp), + delete_ptree(2), + init_ptree(2), + delete_ptree(3) + ), + (StatusUp \== ok -> + Status = StatusUp + ; + Diff is Up-Low, + format(user,'difference: ~6f~n',[Diff]), + flush_output(user), + ((Diff < Delta; Diff =:= 0) -> Ans = 1; Ans = 0), + Status = ok)). + +% no need to re-evaluate if no new proofs found on this level +eval_lower(N,P,ok) :- + low(N,P). +% evaluate if there are proofs +eval_lower(N,P,Status) :- + N > 0, + low(OldN,_), + N \= OldN, + eval_dnf(1,P,Status), + (Status = ok -> + retract(low(_,_)), + assert(low(N,P)), + format(user,'lower bound: ~6f~n',[P]), + flush_output(user) + ; + true). + +% if no stopped derivations, up=low +eval_upper(0,P,ok) :- + retractall(up(_,_)), + low(N,P), + assert(up(N,P)). +% else merge proofs and stopped derivations to get upper bound +% in case of timeout or other problems, skip and use bound from last level +eval_upper(N,UpP,ok) :- + N > 0, + merge_ptree(1,2,3), + eval_dnf(3,UpP,StatusUp), + (StatusUp = ok -> + retract(up(_,_)), + assert(up(N,UpP)) + ; + format(user,'~w - continue using old up~n',[StatusUp]), + flush_output(user), + up(_,UpP)). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% explanation probability - returns list of facts used or constant 'unprovable' as third argument +% problog_max(+Goal,-Prob,-Facts) +% +% uses iterative deepening with samw parameters as bounding algorithm +% threshold gets adapted whenever better proof is found +% +% uses local dynamic predicates max_probability/1 and max_proof/1 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_max(Goal, Prob, Facts) :- + problog_control(off,up), + problog_flag(first_threshold,InitT), + init_problog_max(InitT), + problog_max_id(Goal, Prob, FactIDs), + ( FactIDs == unprovable -> Facts = unprovable; + get_fact_list(FactIDs,Facts)). + +init_problog_max(Threshold) :- + retractall(max_probability(_)), + retractall(max_proof(_)), + assert(max_probability(-999999)), + assert(max_proof(unprovable)), + init_problog(Threshold). + +update_max :- + b_getval(problog_probability,CurrP), + max_probability(MaxP), + (CurrP =< MaxP -> + fail + ; + b_getval(problog_current_proof, IDs), + reverse(IDs,R), + retractall(max_proof(_)), + assert(max_proof(R)), + nb_setval(problog_threshold, CurrP), + retractall(max_probability(_)), + assert(max_probability(CurrP))). + +problog_max_id(Goal, _Prob, _Clauses) :- + problog_call(Goal), + update_max, + fail. 
+problog_max_id(Goal, Prob, Clauses) :- + max_probability(MaxP), + nb_getval(problog_threshold, LT), + problog_flag(last_threshold_log,ToSmall), + ((MaxP >= LT ; \+ problog_control(check,limit); LT < ToSmall) -> + max_proof(Clauses), + Prob is exp(MaxP) + ; + problog_flag(id_stepsize_log,Step), + NewLT is LT+Step, + nb_setval(problog_threshold, NewLT), + problog_control(off,limit), + problog_max_id(Goal, Prob, Clauses)). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% lower bound using k best proofs +% problog_kbest(+Goal,+K,-Prob,-Status) +% +% does iterative deepening search similar to problog_max, but for k(>=1) most likely proofs +% afterwards uses BDD evaluation to calculate probability (also for k=1 -> uniform treatment in learning) +% +% uses dynamic local predicate current_kbest/3 to collect proofs, +% only builds trie at the end (as probabilities of single proofs are important here) +% +% note: >k proofs will be used if the one at position k shares its probability with others, +% as all proofs with that probability will be included +% +% version with _save at the end renames files for ProblogBDD to keep them +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +problog_kbest_save(Goal, K, Prob, Status, BDDFile, ParamFile) :- + problog_kbest(Goal, K, Prob, Status), + ( Status=ok -> + problog_flag(bdd_file,InternBDDFlag), + problog_flag(bdd_par_file,InternParFlag), + problog_flag(dir,DirFlag), + atomic_concat([DirFlag,InternBDDFlag],InternBDD), + atomic_concat([DirFlag,InternParFlag],InternPar), + rename_file(InternBDD,BDDFile), + rename_file(InternPar,ParamFile) + ; + true). + +problog_kbest(Goal, K, Prob, Status) :- + problog_control(off,up), + problog_flag(first_threshold,InitT), + init_problog_kbest(InitT), + problog_kbest_id(Goal, K), + retract(current_kbest(_,ListFound,_NumFound)), + build_prefixtree(ListFound), + eval_dnf(1,Prob,Status), + delete_ptree(1). + +init_problog_kbest(Threshold) :- + retractall(current_kbest(_,_,_)), + assert(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs) + init_ptree(1), + init_problog(Threshold). + +problog_kbest_id(Goal, K) :- + problog_call(Goal), + update_kbest(K), + fail. +problog_kbest_id(Goal, K) :- + current_kbest(CurrentBorder,_,Found), + nb_getval(problog_threshold, Min), + problog_flag(last_threshold_log,ToSmall), + ((Found>=K ; \+ problog_control(check,limit) ; Min < CurrentBorder ; Min < ToSmall) -> + true + ; + problog_flag(id_stepsize_log,Step), + NewLT is Min+Step, + nb_setval(problog_threshold, NewLT), + problog_control(off,limit), + problog_kbest_id(Goal, K)). + +update_kbest(K) :- + b_getval(problog_probability,NewLogProb), + current_kbest(LogThreshold,_,_), + (NewLogProb>=LogThreshold -> + b_getval(problog_current_proof,RevProof), + reverse(RevProof,Proof), + update_current_kbest(K,NewLogProb,Proof) + ; + fail). + +update_current_kbest(_,NewLogProb,Cl) :- + current_kbest(_,List,_), + memberchk(NewLogProb-Cl,List), + !. +update_current_kbest(K,NewLogProb,Cl) :- + retract(current_kbest(OldThres,List,Length)), + sorted_insert(NewLogProb-Cl,List,NewList), + NewLength is Length+1, + (NewLength < K -> + assert(current_kbest(OldThres,NewList,NewLength)) + ; + (NewLength>K -> + First is NewLength-K+1, + cutoff(NewList,NewLength,First,FinalList,FinalLength) + ; FinalList=NewList, FinalLength=NewLength), + FinalList=[NewThres-_|_], + nb_setval(problog_threshold,NewThres), + assert(current_kbest(NewThres,FinalList,FinalLength))). + +sorted_insert(A,[],[A]). 
+sorted_insert(A-LA,[B1-LB1|B], [A-LA,B1-LB1|B] ) :- + A =< B1. +sorted_insert(A-LA,[B1-LB1|B], [B1-LB1|C] ) :- + A > B1, + sorted_insert(A-LA,B,C). + +% keeps all entries with lowest probability, even if implying a total of more than k +cutoff(List,Len,1,List,Len) :- !. +cutoff([P-L|List],Length,First,[P-L|List],Length) :- + nth(First,[P-L|List],PF-_), + PF=:=P, + !. +cutoff([_|List],Length,First,NewList,NewLength) :- + NextFirst is First-1, + NextLength is Length-1, + cutoff(List,NextLength,NextFirst,NewList,NewLength). + +build_prefixtree([]). +build_prefixtree([_-[]|_List]) :- + !, + insert_ptree(true,1). +build_prefixtree([_-L|List]) :- + insert_ptree(L,1), + build_prefixtree(List). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% exact probability +% problog_exact(+Goal,-Prob,-Status) +% +% using all proofs = using all proofs with probability > 0 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_exact(Goal,Prob,Status) :- + problog_low(Goal,0,Prob,Status). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% probability by sampling: +% running another N samples until 95percentCI-width Next is PositiveSoFar+1; Next=PositiveSoFar), + Prob is Next/SamplesNew, + Epsilon is 2*sqrt(Prob*(1-Prob)/SamplesNew), + Low is Prob-Epsilon, + High is Prob+Epsilon, + Diff is 2*Epsilon, + statistics(walltime,[T2,_]), + Time is (T2-InitialTime)/1000, + count_ptree(1,CacheSize), + format('~n~w samples~nestimated probability ~w~n95 percent confidence interval [~w,~w]~n',[SamplesNew,Prob,Low,High]), + open(File,append,Log), + format(Log,'~w ~8f ~8f ~8f ~8f ~3f ~w ~w~n',[SamplesNew,Prob,Low,High,Diff,Time,CacheSize,Next]), + close(Log), + ((Diff format('Runtime ~w sec~2n',[Time]),assert(mc_prob(Prob)) + ; + montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)). + +% continue until next K samples done +montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :- + SamplesNew is SamplesSoFar+1, + copy_term(Goal,GoalC), + (mc_prove(GoalC) -> Next is PositiveSoFar+1; Next=PositiveSoFar), + montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime). + +mc_prove(A) :- !, + (get_some_proof(A) -> + clean_sample + ; + clean_sample,fail + ). + +clean_sample :- + reset_static_array(mc_sample), + fail. +clean_sample. + +% find new proof +get_some_proof(Goal) :- + init_problog(0), + problog_call(Goal), + b_getval(problog_current_proof,Used), + (Used == [] -> Proof=true; reverse(Used,Proof)), + insert_ptree(Proof,1). + diff --git a/ProbLog/problog/flags.yap b/ProbLog/problog/flags.yap new file mode 100644 index 000000000..024c837c0 --- /dev/null +++ b/ProbLog/problog/flags.yap @@ -0,0 +1,284 @@ +%%% -*- Mode: Prolog; -*- + +:- module(flags, [set_problog_flag/2, + problog_flag/2, + problog_flags/0]). + +:- style_check(all). +:- yap_flag(unknown,error). + +:- use_module(print, [print_param/4, + print_sep_line/0]). + +:- ensure_loaded(library(system)). + +:- dynamic bdd_time/1, first_threshold/1, last_threshold/1, id_stepsize/1, prunecheck/1, maxsteps/1, mc_batchsize/1, mc_logfile/1, bdd_file/1, bdd_par_file/1, bdd_result/1, work_dir/1, save_bdd/1. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% global parameters that can be set using set_problog_flag/2 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +problog_flag(Flag,Option) :- + get_problog_flag(Flag,Option). +get_problog_flag(bdd_time,X) :- + bdd_time(X). +get_problog_flag(first_threshold,X) :- + first_threshold(X). 
+get_problog_flag(last_threshold,X) :- + last_threshold(L), + X is exp(L). +get_problog_flag(last_threshold_log,X) :- + last_threshold(X). +get_problog_flag(id_stepsize,X) :- + id_stepsize(L), + X is exp(L). +get_problog_flag(id_stepsize_log,X) :- + id_stepsize(X). +get_problog_flag(prunecheck,X) :- + prunecheck(X). +get_problog_flag(maxsteps,X) :- + maxsteps(X). +get_problog_flag(mc_batchsize,X) :- + mc_batchsize(X). +get_problog_flag(mc_logfile,X) :- + mc_logfile(X). +get_problog_flag(bdd_file,X) :- + bdd_file(X). +get_problog_flag(bdd_par_file,X) :- + bdd_par_file(X). +get_problog_flag(bdd_result,X) :- + bdd_result(X). +get_problog_flag(dir,X) :- + work_dir(X). +get_problog_flag(save_bdd,X) :- + save_bdd(X). + +%%%%%%%%%%%% +% BDD timeout in seconds, used as option in BDD tool +%%%%%%%%%%%% + +set_problog_flag(bdd_time,X) :- + (\+ integer(X); X<0), + !, + format(user,'\% ERROR: value must be positive integer!~n',[]), + flush_output(user), + fail. +set_problog_flag(bdd_time,X) :- + retractall(bdd_time(_)), + assert(bdd_time(X)). + + +%%%%%%%%%%%% +% iterative deepening on minimal probabilities (delta, max, kbest): +% - first threshold (not in log-space as only used to retrieve argument for init_threshold/1, which is also used with user-supplied argument) +% - last threshold to ensure termination in case infinite search space (saved in log-space for easy comparison with current values during search) +% - factor used to decrease threshold for next level, NewMin=Factor*OldMin (saved in log-space) +%%%%%%%%%%%% + +set_problog_flag(first_threshold,X) :- + (\+ number(X); X<0 ; X>1), + !, + format(user,'\% ERROR: value must be in [0,1]!~n',[]), + flush_output(user), + fail. +set_problog_flag(first_threshold,X) :- + retractall(first_threshold(_)), + assert(first_threshold(X)). + +set_problog_flag(last_threshold,X) :- + (\+ number(X); X<0 ; X>1), + !, + format(user,'\% ERROR: value must be in [0,1]!~n',[]), + flush_output(user), + fail. +set_problog_flag(last_threshold,X) :- + retractall(last_threshold(_)), + L is log(X), + assert(last_threshold(L)). + +set_problog_flag(id_stepsize,X) :- + (\+ number(X); X=<0 ; X>=1), + !, + format(user,'\% ERROR: value must be in ]0,1[!~n',[]), + flush_output(user), + fail. +set_problog_flag(id_stepsize,X) :- + retractall(id_stepsize(_)), + L is log(X), + assert(id_stepsize(L)). + + +%%%%%%%%%%%% +% prune check stops derivations if they use a superset of facts already known to form a proof +% (very) costly test, can be switched on/off here +%%%%%%%%%%%% + +set_problog_flag(prunecheck,on) :- + !, + format(user,'WARNING: prune check not implemented, will fail~n',[]), + flush_output(user), + retractall(prunecheck(_)), + assert(prunecheck(on)). +set_problog_flag(prunecheck,off) :- + !, + retractall(prunecheck(_)), + assert(prunecheck(off)). +set_problog_flag(prunecheck,_) :- + format(user,'\% ERROR: value must be \'on\' or \'off\'!~n',[]), + flush_output(user), + fail. + +%%%%%%%%%%%% +% max number of calls to probabilistic facts per derivation (to ensure termination) +%%%%%%%%%%%% + +set_problog_flag(maxsteps,X) :- + (\+ integer(X); X<0), + !, + format(user,'\% ERROR: value must be positive integer!~n',[]), + flush_output(user), + fail. +set_problog_flag(maxsteps,X) :- + retractall(maxsteps(_)), + assert(maxsteps(X)). 
+ + +%%%%%%%%%%%% +% montecarlo: recalculate current approximation after N samples +%%%%%%%%%%%% + +set_problog_flag(mc_batchsize,X) :- + (\+ integer(X); X<0), + !, + format(user,'\% ERROR: value must be positive integer!~n',[]), + flush_output(user), + fail. +set_problog_flag(mc_batchsize,X) :- + retractall(mc_batchsize(_)), + assert(mc_batchsize(X)). + +%%%%%%%%%%%% +% montecarlo: write log to this file +%%%%%%%%%%%% + +set_problog_flag(mc_logfile,X) :- + \+ atom(X), + !, + format(user,'\% ERROR: value must be atom!~n',[]), + flush_output(user), + fail. +set_problog_flag(mc_logfile,X) :- + retractall(mc_logfile(_)), + assert(mc_logfile(X)). + +%%%%%%%%%%%% +% files to write BDD script and pars +% bdd_file overwrites bdd_par_file with matching extended name +% if different name wanted, respect order when setting +%%%%%%%%%%%% + +set_problog_flag(bdd_file,X) :- + \+ atom(X), + !, + format(user,'\% ERROR: value must be atom!~n',[]), + flush_output(user), + fail. +set_problog_flag(bdd_file,X) :- + retractall(bdd_file(_)), + atomic_concat(X,'_probs',Y), + set_problog_flag(bdd_par_file,Y), + atomic_concat(X,'_res',Z), + set_problog_flag(bdd_result,Z), + assert(bdd_file(X)). +set_problog_flag(bdd_par_file,X) :- + \+ atom(X), + !, + format(user,'\% ERROR: value must be atom!~n',[]), + flush_output(user), + fail. +set_problog_flag(bdd_par_file,X) :- + retractall(bdd_par_file(_)), + assert(bdd_par_file(X)). +set_problog_flag(bdd_result,X) :- + \+ atom(X), + !, + format(user,'\% ERROR: value must be atom!~n',[]), + flush_output(user), + fail. +set_problog_flag(bdd_result,X) :- + retractall(bdd_result(_)), + assert(bdd_result(X)). + +%%%%%%%%%%%% +% working directory: all the temporary and output files will be located there +%%%%%%%%%%%% +set_problog_flag(dir,X) :- + \+ atom(X), + !, + format(user,'\% ERROR: value must be atom!~n',[]), + flush_output(user), + fail. +set_problog_flag(dir,X) :- + retractall(work_dir(_)), + atomic_concat([X,'/'],D), + atomic_concat(['mkdir ',D],Mkdir), + (file_exists(X) -> true; shell(Mkdir)), + assert(work_dir(D)). + +%%%%%%%%%%%% +% save BDD information for the (last) lower bound BDD used during inference +% produces three files named save_script, save_params, save_map +% located in the directory given by problog_flag dir +%%%%%%%%%%%% + +set_problog_flag(save_bdd,true) :- + !, + retractall(save_bdd(_)), + assert(save_bdd(true)). +set_problog_flag(save_bdd,false) :- + !, + retractall(save_bdd(_)), + assert(save_bdd(false)). +set_problog_flag(save_bdd,_) :- + format(user,'\% ERROR: value must be \'true\' or \'false\'!~n',[]), + flush_output(user), + fail. 
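A few example calls against this flag interface (the values are arbitrary but lie inside the documented ranges):

    ?- set_problog_flag(first_threshold, 0.1).   % must lie in [0,1]
    ?- set_problog_flag(id_stepsize, 0.5).       % must lie in ]0,1[, stored as log(0.5)
    ?- set_problog_flag(bdd_time, 60).           % BDD timeout in seconds
    ?- problog_flag(id_stepsize, X).             % reads back approximately 0.5 (exp of the stored log)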
+ + +%%%%%%%%%%%%%%%%%%%%%%%% +% show values +%%%%%%%%%%%%%%%%%%%%%%%% + +problog_flags :- + format('~n',[]), + print_sep_line, + format('problog flags: use set_problog_flag(Flag,Option) to change, problog_flag(Flag,Option) to view~n',[]), + print_sep_line, + print_param(description,value,flag,option), + print_sep_line, + problog_flag(bdd_time,StopBDD), + print_param('BDD computation timeout in seconds',StopBDD,'bdd_time','positive integer'), + problog_flag(first_threshold,First), + print_param('starting threshold iterative deepening',First,'first_threshold','0 =< Option =< 1'), + problog_flag(last_threshold,Last), + print_param('stopping threshold iterative deepening',Last,'last_threshold','0 =< Option =< 1'), + problog_flag(id_stepsize,Decrease), + print_param('threshold shrinking factor iterative deepening',Decrease,'id_stepsize','0 < Option < 1'), + problog_flag(prunecheck,Check), + print_param('stop derivations including all facts of known proof',Check,'prunecheck','on/off'), + problog_flag(maxsteps,Steps), + print_param('max. number of prob. steps per derivation',Steps,'maxsteps','positive integer'), + problog_flag(mc_batchsize,MCBatch), + print_param('number of samples before update in montecarlo',MCBatch,'mc_batchsize','positive integer'), + problog_flag(mc_logfile,MCFile), + print_param('logfile for montecarlo',MCFile,'mc_logfile','atom'), + problog_flag(bdd_file,BDDFile), + print_param('file for BDD script',BDDFile,'bdd_file','atom'), + problog_flag(dir,WorkDir), + print_param('directory for files',WorkDir,'dir','atom'), + problog_flag(save_bdd,Save), + print_param('save BDD files for (last) lower bound',Save,'save_bdd','true/false'), + print_sep_line, + format('~n',[]), + flush_output. + diff --git a/ProbLog/problog/print.yap b/ProbLog/problog/print.yap new file mode 100644 index 000000000..a475ce29a --- /dev/null +++ b/ProbLog/problog/print.yap @@ -0,0 +1,24 @@ +%%% -*- Mode: Prolog; -*- +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% printing functions used for problog_help and problog_flags +% collected here to have formatting at one place +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +:- module(print, [print_param/4, + print_sep_line/0, + print_inference/2]). + +print_param(Keyword,Value,Function,Legal) :- + format(user,'~w~55+~q~15+~w~30+~w~25+~n',[Keyword,Value,Function,Legal]). +print_sep_line :- + sep_line(125). +sep_line(0) :- + !, + format('~n',[]). +sep_line(N) :- + format('-',[]), + NN is N-1, + sep_line(NN). + +print_inference(Call,Description) :- + format(user,'~w~65+~w~60+~n',[Call,Description]). diff --git a/ProbLog/problog/tptree.yap b/ProbLog/problog/tptree.yap new file mode 100644 index 000000000..a4fc28ee5 --- /dev/null +++ b/ProbLog/problog/tptree.yap @@ -0,0 +1,500 @@ +%%% -*- Mode: Prolog; -*- + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% prefix-trees for managing a DNF +% remembers shortest prefix of a conjunction only (i.e. a*b+a*b*c results in a*b only, but b*a+a*b*c is not reduced) +% children are sorted, but branches aren't (to speed up search while keeping structure sharing from proof procedure) +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +:- module(ptree,[init_ptree/1, + delete_ptree/1, + rename_ptree/2, + member_ptree/2, + enum_member_ptree/2, + insert_ptree/2, + delete_ptree/2, + edges_ptree/2, + count_ptree/2, + prune_check_ptree/2, + empty_ptree/1, + merge_ptree/3, + bdd_ptree/3, + bdd_ptree_map/4 + ]). 
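A minimal usage sketch of this interface, assuming trie ID 1 and two hypothetical fact IDs 3 and 7:

    ?- init_ptree(1),
       insert_ptree([3,7], 1),   % store the conjunction of facts 3 and 7
       count_ptree(1, N),        % N = 1: one conjunction in the DNF
       edges_ptree(1, Edges),    % Edges = [3,7]: all fact IDs occurring in the trie
       delete_ptree(1).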
+ +:- use_module(library(tries), + [ + trie_open/1, + trie_close/1, + trie_stats/4, + trie_check_entry/3, + trie_get_entry/2, + trie_put_entry/3, + trie_remove_entry/1, + trie_usage/4, + trie_dup/2, + trie_join/2, + trie_traverse/2 + ]). + +:- use_module(library(ordsets), + [ + ord_subset/2 + ]). + +:- style_check(all). +:- yap_flag(unknown,error). + +:- use_module(flags,[problog_flag/2]). +:- ensure_loaded(library(lists)). +:- ensure_loaded(library(system)). + +% name lexicon external - internal +sym(1,tree1) :- !. +sym(2,tree2) :- !. +sym(3,tree3) :- !. +sym(N,AN) :- atomic_concat([tree,N],AN). + +%%%%%%%%%%%%%%%%%%%%%%%% +% ptree basics +%%%%%%%%%%%%%%%%%%%%%%%% + +init_ptree(ID) :- + sym(ID,Sym), + trie_open(Trie), + nb_setval(Sym, Trie). + +delete_ptree(ID) :- + sym(ID,Sym), + nb_getval(Sym, Trie), !, + trie_close(Trie), + trie_open(NewTrie), + nb_setval(Sym, NewTrie). +delete_ptree(_). + +rename_ptree(OldID,NewID) :- + sym(OldID,OldSym), + sym(NewID,NewSym), + nb_getval(OldSym, Trie), + nb_set_shared_val(NewSym, Trie). + +empty_ptree(ID) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_usage(Trie, 0, 0, 0). + + +%%%%%%%%%%%%%%%%%%%%%%%% +% member +%%%%%%%%%%%%%%%%%%%%%%%% + +% non-backtrackable (to check) +member_ptree(List,ID) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_check_entry(Trie, List, _). + +% backtrackable (to list) +enum_member_ptree(ID,List) :- + sym(ID,Sym), + nb_getval(Sym, Tree), + trie_path(Tree, List). + +trie_path(Tree, List) :- + trie_traverse(Tree,Ref), + trie_get_entry(Ref, List). + +%%%%%%%%%%%%%%%%%%%%%%%% +% insert conjunction +%%%%%%%%%%%%%%%%%%%%%%%% +insert_ptree(true,ID) :- + sym(ID,Sym), + !, + nb_getval(Sym, Trie), + trie_close(Trie), + trie_open(NTrie), + trie_put_entry(NTrie, true, _). +insert_ptree(List,ID) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_put_entry(Trie, List, _). + +%%%%%%%%%%%%%%%%%%%%%%%% +% delete conjunction +%%%%%%%%%%%%%%%%%%%%%%%% +delete_ptree(List,ID) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_check_entry(Trie, List, Ref), + trie_remove_entry(Ref). + + +%%%%%%%% +% return list -Edges of all edge labels in ptree +% doesn't use any heuristic to order those for the BDD +% (automatic reordering has to do the job) +%%%%%%%%% +edges_ptree(ID,[]) :- + empty_ptree(ID), + !. +edges_ptree(ID,[]) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_check_entry(Trie, true, _), + !. +edges_ptree(ID,Edges) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + setof(X, trie_literal(Trie, X), Edges). + +trie_literal(Trie, X) :- + trie_traverse(Trie,Ref), + trie_get_entry(Ref, List), + member(X, List). + +%%%%%%%% +% number of conjunctions in the tree +%%%%%%%%% + +count_ptree(ID,N) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_usage(Trie, N, _, _). + +%%%%%%%% +% check whether some branch of ptree is a subset of conjunction List +% useful for pruning the search for proofs (optional due to time overhead) +% currently not implemented, just fails +%%%%%%% + +prune_check_ptree(_List,_TreeID) :- + format(user,'FAIL: prune check currently not supported~n',[]), + flush_output(user), + fail. + +%%%%%%%%%%%%% +% merge two ptrees +% - take care not to loose proper prefixes that are proofs! +%%%%%%%%%%%%%%% + +merge_ptree(ID1,_,ID3) :- + sym(ID1,Sym1), + sym(ID3,Sym3), + nb_getval(Sym1, T1), + trie_check_entry(T1, true, _), + !, + trie_open(T3), + trie_put_entry(T3, true, _), + nb_setval(Sym3, T3). 
+merge_ptree(_,ID2,ID3) :- + sym(ID2,Sym2), + sym(ID3,Sym3), + nb_getval(Sym2, T2), + trie_check_entry(T2, true, _), + !, + trie_open(T3), + trie_put_entry(T3, true, _), + nb_setval(Sym3, T3). +merge_ptree(ID1,ID2,ID3) :- + sym(ID1,Sym1), + sym(ID2,Sym2), + sym(ID3,Sym3), + nb_getval(Sym1, T1), + nb_getval(Sym2, T2), + trie_dup(T1, T3), + trie_join(T3,T2), + nb_setval(Sym3, T3). + + +%%%%%%%%%%%%%%%%%%%%%%%% +% write BDD info for given ptree to file +% - initializes leaf BDDs (=variables) first +% - then compresses ptree to exploit subtree sharing +% - bdd_pt/1 does the work on the structure itself +%%%%%%%%%%%%%%%%%%%%%%%% + +bdd_ptree(ID,FileBDD,FileParam) :- + bdd_ptree_script(ID,FileBDD,FileParam), + eraseall(map). + +% version returning variable mapping +bdd_ptree_map(ID,FileBDD,FileParam,Mapping) :- + bdd_ptree_script(ID,FileBDD,FileParam), + findall(X,recorded(map,X,_),Map), + add_probs(Map,Mapping), + eraseall(map). + +add_probs([],[]). +add_probs([m(A,Name)|Map],[m(A,Name,Prob)|Mapping]) :- + problog:get_fact_probability(A,Prob), + add_probs(Map,Mapping). + +% number of variables may be to high: +% counted on trie, but conversion to old tree representation +% transforms A*B+A to A (prefix-test) +bdd_ptree_script(ID,FileBDD,FileParam) :- + edges_ptree(ID,Edges), + tell(FileParam), + bdd_vars_script(Edges), + flush_output, + told, + length(Edges,VarCount), + assert(c_num(1)), + bdd_pt(ID,CT), + c_num(NN), + IntermediateSteps is NN-1, + tell(FileBDD), + format('@BDD1~n~w~n~w~n~w~n',[VarCount,0,IntermediateSteps]), + output_compressed_script(CT), + told, + retractall(c_num(_)), + retractall(compression(_,_)). + +% write parameter file by iterating over all var/not(var) occuring in the tree +bdd_vars_script(Edges) :- + bdd_vars_script(Edges,0). +bdd_vars_script([],_). +bdd_vars_script([A|B],N) :- + problog:get_fact_probability(A,P), + get_var_name(A,NameA), + format('@~w~n~12f~n',[NameA,P]), + NN is N+1, + bdd_vars_script(B,NN). + +%%%%%%%%%%%%%%%%%%%%%%%% +% find top level symbol for script +%%%%%%%%%%%%%%%%%%%%%%%% + +% special cases: variable-free formulae +bdd_pt(ID,false) :- + empty_ptree(ID), + !, + once(retractall(c_num(_))), + once(assert(c_num(2))). +bdd_pt(ID,true) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_check_entry(Trie, true, _), + !, + once(retractall(c_num(_))), + once(assert(c_num(2))). + +% general case: transform trie to nested tree structure for compression +bdd_pt(ID,CT) :- + sym(ID,Sym), + nb_getval(Sym, Trie), + trie_to_tree(Trie, Tree), + compress_pt(Tree,CT). + +trie_to_tree(Trie, Tree) :- + findall(Path,trie_path(Trie, Path), Paths), + add_trees(Paths, [], Tree). + +add_trees([], Tree, Tree). +add_trees([List|Paths], Tree0, Tree) :- + ins_pt(List, Tree0, TreeI), + add_trees(Paths, TreeI, Tree). + +ins_pt([],_T,[]) :- !. +ins_pt([A|B],[s(A1,AT)|OldT],NewT) :- + compare(Comp, A1, A), + (Comp == = -> + (AT == [] -> + NewT=[s(A1,AT)|OldT] + ; + NewT = [s(A1,NewAT)|OldT], + ins_pt(B, AT, NewAT)) + ; + Comp == > -> + NewT = [s(A1,AT)|Tree], + ins_pt([A|B], OldT, Tree) + ; + NewT = [s(A,BTree),s(A1,AT)|OldT], + ins_pt(B,[],BTree) + ). +ins_pt([A|B],[],[s(A,NewAT)]) :- + ins_pt(B,[],NewAT). + +%%%%%%%%%%%% +% BDD compression: alternates and- and or-levels to build BDD bottom-up +% each sub-BDD will be either a conjunction of a one-node BDD with some BDD or a disjunction of BDDs +% uses the internal database to temporarily store a map of components +%%%%%%%%%%%% + +% T is completely compressed and contains single variable +% i.e. 
T of form x12 +compress_pt(T,TT) :- + atom(T), + test_var_name(T), + !, + get_next_name(TT), + assertz(compression(TT,[T])). +% T is completely compressed and contains subtrees +% i.e. T of form 'L56' +compress_pt(T,T) :- + atom(T). +% T not yet compressed +% i.e. T is a tree-term (nested list & s/2 structure) +% -> execute one layer of compression, then check again +compress_pt(T,CT) :- + \+ atom(T), + and_or_compression(T,IT), + compress_pt(IT,CT). + +% transform tree-term T into tree-term CT where last two layers have been processed +% i.e. introduce names for subparts (-> Map) and replace (all occurrenes of) subparts by this names +and_or_compression(T,CT) :- + and_comp(T,AT), + or_comp(AT,CT). + +% replace leaves that are single child by variable representing father-AND-child +and_comp(T,AT) :- + all_leaves_pt(T,Leaves), + compression_mapping(Leaves,Map), + replace_pt(T,Map,AT). + +% replace list of siblings by variable representing their disjunction +or_comp(T,AT) :- + all_leaflists_pt(T,Leaves), + compression_mapping(Leaves,Map), + replace_pt(T,Map,AT). + +all_leaves_pt(T,L) :- + all(X,some_leaf_pt(T,X),L). + +some_leaf_pt([s(A,[])|_],s(A,[])). +some_leaf_pt([s(A,L)|_],s(A,L)) :- + atom(L). +some_leaf_pt([s(_,L)|_],X) :- + some_leaf_pt(L,X). +some_leaf_pt([_|L],X) :- + some_leaf_pt(L,X). + +all_leaflists_pt(L,[L]) :- + atomlist(L),!. +all_leaflists_pt(T,L) :- + all(X,some_leaflist_pt(T,X),L),!. +all_leaflists_pt(_,[]). + +some_leaflist_pt([s(_,L)|_],L) :- + atomlist(L). +some_leaflist_pt([s(_,L)|_],X) :- + some_leaflist_pt(L,X). +some_leaflist_pt([_|L],X) :- + some_leaflist_pt(L,X). + +atomlist([]). +atomlist([A|B]) :- + atom(A), + atomlist(B). + +% for each subtree that will be compressed, add its name +% only introduce 'L'-based names when subtree composes elements, store these in compression/2 for printing the script +compression_mapping([],[]). +compression_mapping([First|B],[N-First|BB]) :- + ( + First = s(A,[]) % subtree is literal -> use variable's name x17 from map + -> + recorded(map,m(A,N),_) + ; + (First = s(A,L),atom(L)) % subtree is node with single completely reduced child -> use next 'L'-based name + -> (get_next_name(N), + assertz(compression(N,s(A,L)))) + ; + (First = [L],atom(L)) % subtree is an OR with a single completely reduced element -> use element's name + -> N=L + ; + (atomlist(First), % subtree is an OR with only (>1) completely reduced elements -> use next 'L'-based name + get_next_name(N), + assertz(compression(N,First))) + ), + compression_mapping(B,BB). + + + +% replace_pt(+T,+Map,-NT) +% given the tree-term T and the Map of Name-Subtree entries, replace each occurence of Subtree in T with Name -> result NT +replace_pt(T,[],T). +replace_pt([],_,[]). +replace_pt(L,M,R) :- + atomlist(L), + member(R-L,M), + !. +replace_pt([L|LL],[M|MM],R) :- + replace_pt_list([L|LL],[M|MM],R). + +replace_pt_list([T|Tree],[M|Map],[C|Compr]) :- + replace_pt_single(T,[M|Map],C), + replace_pt_list(Tree,[M|Map],Compr). +replace_pt_list([],_,[]). + +replace_pt_single(s(A,T),[M|Map],Res) :- + atomlist(T), + member(Res-s(A,T),[M|Map]), + !. +replace_pt_single(s(A,T),[M|Map],s(A,Res)) :- + atomlist(T), + member(Res-T,[M|Map]), + !. +replace_pt_single(s(A,T),[M|Map],Res) :- + member(Res-s(A,T),[M|Map]), + !. +replace_pt_single(s(A,T),[M|Map],s(A,TT)) :- + replace_pt_list(T,[M|Map],TT). +replace_pt_single(A,_,A) :- + atom(A). + +%%%%%%%%%%%% +% output for script +% input argument is compressed tree, i.e. 
true/false or name assigned in last compression step +%%%%%%%%%%%% +output_compressed_script(false) :- + !, + format('L1 = FALSE~nL1~n',[]). +output_compressed_script(true) :- + !, + format('L1 = TRUE~nL1~n',[]). +% for each name-subtree pair, write corresponding line to script, e.g. L17 = x4 * L16 +% stop after writing definition of root (last entry in compression/2), add it's name to mark end of script +output_compressed_script(T) :- + once(retract(compression(Short,Long))), + (T = Short -> + format('~w = ',[Short]), + format_compression_script(Long), + format('~w~n',[Short]) + ; + format('~w = ',[Short]), + format_compression_script(Long), + output_compressed_script(T)). + +format_compression_script(s(A,B)) :- + recorded(map,m(A,C),_), + format('~w * ~w~n',[C,B]). +format_compression_script([A]) :- + format('~w~n',[A]). +format_compression_script([A,B|C]) :- + format('~w + ',[A]), + format_compression_script([B|C]). + +%%%%%%%%%%%%%%%%%%%%%%%% +% auxiliaries for translation to BDD +%%%%%%%%%%%%%%%%%%%%%%%% + +% prefix the current counter with "L" +get_next_name(Name) :- + retract(c_num(N)), + NN is N+1, + assert(c_num(NN)), + atomic_concat('L',N,Name). + +% create BDD-var as fact id prefixed by x +% learning.yap relies on this format! +% when changing, also adapt test_var_name/1 below +get_var_name(A,NameA) :- + atomic_concat([x,A],NameA), + recorda(map,m(A,NameA),_). + +% test used by base case of compression mapping to detect single-variable tree +% has to match above naming scheme +test_var_name(T) :- + atomic_concat(x,_,T). \ No newline at end of file diff --git a/ProbLog/simplecudd/Example.c b/ProbLog/simplecudd/Example.c new file mode 100644 index 000000000..c0dab075f --- /dev/null +++ b/ProbLog/simplecudd/Example.c @@ -0,0 +1,281 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. Mantadelis and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis * +* File: Example.c * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. * +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. 
(You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. 
The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
* +* * +* The End * +* * +\******************************************************************************/ + + +#include "simplecudd.h" + +typedef struct _extmanager { + DdManager *manager; + DdNode *t, *f; + hisqueue *his; + namedvars varmap; +} extmanager; + +void DFS(extmanager MyManager, DdNode *Current); +int compexpand(extmanager MyManager, DdNode *Current, extmanager MyManager2, DdNode *Current2); +int bufstrcat(char *targetstr, int targetmem, const char *srcstr); +void getalltruepaths(extmanager MyManager, DdNode *Current, const char *startpath, const char *prevvar); + +int main(int argc, char **arg) { + extmanager MyManager; + DdNode *bdd; + bddfileheader fileheader; + int code; + char yn; + code = -1; + if (argc != 2) { + fprintf(stderr, "\nUsage: %s [filename]\nGenerates and traverses a BDD from file\n", arg[0]); + fprintf(stderr, "\nUsage: %s -online\nGenerates and traverses a BDD online mode\n", arg[0]); + return code; + } + RAPIDLOADON; + if (strcmp("-online", arg[1]) == 0) { + MyManager.manager = simpleBDDinit(0); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(1, 0); + bdd = OnlineGenerateBDD(MyManager.manager, &MyManager.varmap); + } else { + fileheader = ReadFileHeader(arg[1]); + switch(fileheader.filetype) { + case BDDFILE_SCRIPT: + MyManager.manager = simpleBDDinit(fileheader.varcnt); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(fileheader.varcnt, fileheader.varstart); + bdd = FileGenerateBDD(MyManager.manager, MyManager.varmap, fileheader); + break; + case BDDFILE_NODEDUMP: + MyManager.manager = simpleBDDinit(fileheader.varcnt); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(fileheader.varcnt, fileheader.varstart); + bdd = LoadNodeDump(MyManager.manager, MyManager.varmap, fileheader.inputfile); + break; + default: + fprintf(stderr, "Error: not a valid file format to load.\n"); + return code; + break; + } + } + if (bdd != NULL) { + printf("Do you want to load parameter values from testdata.txt [y]? "); yn = getchar(); getchar(); + if (yn == 'y') LoadVariableData(MyManager.varmap, "testdata.txt"); + code = 0; + MyManager.his = InitHistory(GetVarCount(MyManager.manager)); + if (strcmp("-online", arg[1]) != 0) { + DFS(MyManager, bdd); + printf("Do you need an export [y]? "); yn = getchar(); getchar(); + if (yn == 'y') simpleNamedBDDtoDot(MyManager.manager, MyManager.varmap, bdd, "SimpleCUDDExport.dot"); + printf("Do you want a save [y]? "); yn = getchar(); getchar(); + if (yn == 'y') SaveNodeDump(MyManager.manager, MyManager.varmap, bdd, "SimpleCUDDSave.sav"); + printf("Do you want to see all true paths [y]? 
"); yn = getchar(); getchar(); + if (yn == 'y') { + ReInitHistory(MyManager.his, GetVarCount(MyManager.manager)); + getalltruepaths(MyManager, bdd, "", ""); + } + } else { + onlinetraverse(MyManager.manager, MyManager.varmap, MyManager.his, bdd); + } + } + if (MyManager.manager != NULL) KillBDD(MyManager.manager); + return code; +} + +void DFS(extmanager MyManager, DdNode *Current) { + DdNode *h, *l; + hisnode *Found; + char *curnode; + curnode = GetNodeVarNameDisp(MyManager.manager, MyManager.varmap, Current); + if (GetIndex(Current) < MyManager.varmap.varcnt) { + printf("%s(%f,%i,%s)\n", curnode, MyManager.varmap.dvalue[GetIndex(Current)], MyManager.varmap.ivalue[GetIndex(Current)], MyManager.varmap.dynvalue[GetIndex(Current)]); + } else { + printf("%s\n", curnode); + } + if ((Current != MyManager.t) && (Current != MyManager.f) && + ((Found = GetNode(MyManager.his, MyManager.varmap.varstart, Current)) == NULL)) { + l = LowNodeOf(MyManager.manager, Current); + h = HighNodeOf(MyManager.manager, Current); + printf("l(%s)->", curnode); + DFS(MyManager, l); + printf("h(%s)->", curnode); + DFS(MyManager, h); + AddNode(MyManager.his, MyManager.varmap.varstart, Current, 0.0, 0, NULL); + } +} + +void getalltruepaths(extmanager MyManager, DdNode *Current, const char *startpath, const char *prevvar) { + DdNode *h, *l; + char *curnode, *curpath; + int pathmaxsize = 1024; + curpath = (char *) malloc(sizeof(char) * pathmaxsize); + curpath[0] = '\0'; + pathmaxsize = bufstrcat(curpath, pathmaxsize, startpath); + pathmaxsize = bufstrcat(curpath, pathmaxsize, prevvar); + pathmaxsize = bufstrcat(curpath, pathmaxsize, "*"); + curnode = GetNodeVarNameDisp(MyManager.manager, MyManager.varmap, Current); + if (Current == MyManager.t) { + printf("%s\n", curpath); + } else if (Current != MyManager.f) { + h = HighNodeOf(MyManager.manager, Current); + if (h != MyManager.f) { + getalltruepaths(MyManager, h, curpath, curnode); + } + l = LowNodeOf(MyManager.manager, Current); + if (l != MyManager.f) { + pathmaxsize = bufstrcat(curpath, pathmaxsize, "~"); + getalltruepaths(MyManager, l, curpath, curnode); + } + } + free(curpath); +} + +int bufstrcat(char *targetstr, int targetmem, const char *srcstr) { + int strinc = strlen(srcstr), strsize = strlen(targetstr); + while ((strsize + strinc) > (targetmem - 1)) { + targetmem *= 2; + targetstr = (char *) realloc(targetstr, sizeof(char) * targetmem); + } + strcat(targetstr, srcstr); + return targetmem; +} diff --git a/ProbLog/simplecudd/LICENCE b/ProbLog/simplecudd/LICENCE new file mode 100644 index 000000000..5f221241e --- /dev/null +++ b/ProbLog/simplecudd/LICENCE @@ -0,0 +1,131 @@ + + + + + The "Artistic License" + + Preamble + +The intent of this document is to state the conditions under which a +Package may be copied, such that the Copyright Holder maintains some +semblance of artistic control over the development of the package, +while giving the users of the package the right to use and distribute +the Package in a more-or-less customary fashion, plus the right to make +reasonable modifications. + +Definitions: + + "Package" refers to the collection of files distributed by the + Copyright Holder, and derivatives of that collection of files + created through textual modification. + + "Standard Version" refers to such a Package if it has not been + modified, or has been modified in accordance with the wishes + of the Copyright Holder as specified below. + + "Copyright Holder" is whoever is named in the copyright or + copyrights for the package. 
+ + "You" is you, if you're thinking about copying or distributing + this Package. + + "Reasonable copying fee" is whatever you can justify on the + basis of media cost, duplication charges, time of people involved, + and so on. (You will not be required to justify it to the + Copyright Holder, but only to the computing community at large + as a market that must bear the fee.) + + "Freely Available" means that no fee is charged for the item + itself, though there may be fees involved in handling the item. + It also means that recipients of the item may redistribute it + under the same conditions they received it. + +1. You may make and give away verbatim copies of the source form of the +Standard Version of this Package without restriction, provided that you +duplicate all of the original copyright notices and associated disclaimers. + +2. You may apply bug fixes, portability fixes and other modifications +derived from the Public Domain or from the Copyright Holder. A Package +modified in such a way shall still be considered the Standard Version. + +3. You may otherwise modify your copy of this Package in any way, provided +that you insert a prominent notice in each changed file stating how and +when you changed that file, and provided that you do at least ONE of the +following: + + a) place your modifications in the Public Domain or otherwise make them + Freely Available, such as by posting said modifications to Usenet or + an equivalent medium, or placing the modifications on a major archive + site such as uunet.uu.net, or by allowing the Copyright Holder to include + your modifications in the Standard Version of the Package. + + b) use the modified Package only within your corporation or organization. + + c) rename any non-standard executables so the names do not conflict + with standard executables, which must also be provided, and provide + a separate manual page for each non-standard executable that clearly + documents how it differs from the Standard Version. + + d) make other distribution arrangements with the Copyright Holder. + +4. You may distribute the programs of this Package in object code or +executable form, provided that you do at least ONE of the following: + + a) distribute a Standard Version of the executables and library files, + together with instructions (in the manual page or equivalent) on where + to get the Standard Version. + + b) accompany the distribution with the machine-readable source of + the Package with your modifications. + + c) give non-standard executables non-standard names, and clearly + document the differences in manual pages (or equivalent), together + with instructions on where to get the Standard Version. + + d) make other distribution arrangements with the Copyright Holder. + +5. You may charge a reasonable copying fee for any distribution of this +Package. You may charge any fee you choose for support of this +Package. You may not charge a fee for this Package itself. However, +you may distribute this Package in aggregate with other (possibly +commercial) programs as part of a larger (possibly commercial) software +distribution provided that you do not advertise this Package as a +product of your own. You may embed this Package's interpreter within +an executable of yours (by linking); this shall be construed as a mere +form of aggregation, provided that the complete Standard Version of the +interpreter is so embedded. + +6. 
The scripts and library files supplied as input to or produced as +output from the programs of this Package do not automatically fall +under the copyright of this Package, but belong to whoever generated +them, and may be sold commercially, and may be aggregated with this +Package. If such scripts or library files are aggregated with this +Package via the so-called "undump" or "unexec" methods of producing a +binary executable image, then distribution of such an image shall +neither be construed as a distribution of this Package nor shall it +fall under the restrictions of Paragraphs 3 and 4, provided that you do +not represent such an executable image as a Standard Version of this +Package. + +7. C subroutines (or comparably compiled subroutines in other +languages) supplied by you and linked into this Package in order to +emulate subroutines and variables of the language defined by this +Package shall not be considered part of this Package, but are the +equivalent of input as in Paragraph 6, provided these subroutines do +not change the language in any way that would cause it to fail the +regression tests for the language. + +8. Aggregation of this Package with a commercial distribution is always +permitted provided that the use of this Package is embedded; that is, +when no overt attempt is made to make this Package's interfaces visible +to the end user of the commercial distribution. Such use shall not be +construed as a distribution of this Package. + +9. The name of the Copyright Holder may not be used to endorse or promote +products derived from this software without specific prior written permission. + +10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED +WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. + + The End diff --git a/ProbLog/simplecudd/Makefile.in b/ProbLog/simplecudd/Makefile.in new file mode 100644 index 000000000..287f91715 --- /dev/null +++ b/ProbLog/simplecudd/Makefile.in @@ -0,0 +1,34 @@ +CUDD = cudd-2.4.1 +DYNAMIC = +FLAGS = +INCLUDE = -I $(CUDD)/include +LINKFLAGS = -lm +LINKLIBS = $(CUDD)/cudd/libcudd.a $(CUDD)/mtr/libmtr.a $(CUDD)/st/libst.a $(CUDD)/util/libutil.a $(CUDD)/epd/libepd.a + +default: makecudd example problog + +example: Example.o simplecudd.o general.o + @echo Making Example... + @echo Copyright T. Mantadelis and Katholieke Universiteit Leuven 2008 + gcc Example.o simplecudd.o general.o $(LINKLIBS) $(LINKFLAGS) -o Example + +problog: ProblogBDD.o simplecudd.o general.o + @echo Making ProblogBDD... + @echo Copyright T. Mantadelis, A. Kimmig, B. Gutmann and Katholieke Universiteit Leuven 2008 + gcc ProblogBDD.o simplecudd.o general.o $(LINKLIBS) $(LINKFLAGS) -o ProblogBDD + +makecudd: + @(cd $(CUDD); \ + echo Making cudd...; \ + make) + +%.o : %.c + gcc $(FLAGS) $(INCLUDE) $(DYNAMIC) -c $< + +clean: cleancudd + rm -f *.o ProblogBDD Example + +cleancudd: + @(cd $(CUDD); \ + echo Cleaning cudd...; \ + make clean) diff --git a/ProbLog/simplecudd/ProblogBDD.c b/ProbLog/simplecudd/ProblogBDD.c new file mode 100644 index 000000000..687216e20 --- /dev/null +++ b/ProbLog/simplecudd/ProblogBDD.c @@ -0,0 +1,670 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. Mantadelis, A. Kimmig, B. 
Gutmann * +* and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis, Angelika Kimmig, Bernd Gutmann * +* File: ProblogBDD.c * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. * +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. (You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. 
You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
* +* * +* The End * +* * +\******************************************************************************/ + + +#include "simplecudd.h" +#include + +typedef struct _parameters { + int loadfile; + int savedfile; + int exportfile; + int inputfile; + int debug; + int errorcnt; + int *error; + int method; + int queryid; + int timeout; + double sigmoid_slope; + int online; + int maxbufsize; + char *ppid; +} parameters; + +typedef struct _gradientpair { + double probability; + double gradient; +} gradientpair; + +typedef struct _extmanager { + DdManager *manager; + DdNode *t, *f; + hisqueue *his; + namedvars varmap; +} extmanager; + +int argtype(const char *arg); +void printhelp(int argc, char **arg); +parameters loadparam(int argc, char **arg); +parameters params; + +void handler(int num); +void pidhandler(int num); +void termhandler(int num); + +double sigmoid(double x, double slope); +void myexpand(extmanager MyManager, DdNode *Current); +double CalcProbability(extmanager MyManager, DdNode *Current); +double CalcProbabilitySigmoid(extmanager MyManager, DdNode *Current); +gradientpair CalcGradient(extmanager MyManager, DdNode *Current, int TargetVar, char *TargetPattern); +int patterncalculated(char *pattern, extmanager MyManager, int loc); +char * extractpattern(char *thestr); + +int main(int argc, char **arg) { + extmanager MyManager; + DdNode *bdd; + bddfileheader fileheader; + int i, ivarcnt, code; + gradientpair tvalue; + double probability = -1.0; + char *varpattern; + varpattern = NULL; + code = -1; + params = loadparam(argc, arg); + + if (params.errorcnt > 0) { + printhelp(argc, arg); + for (i = 0; i < params.errorcnt; i++) { + fprintf(stderr, "Error: not known or error at parameter %s.\n", arg[params.error[i]]); + } + return -1; + } + + if (params.online == 0 && params.loadfile == -1) { + printhelp(argc, arg); + fprintf(stderr, "Error: you must specify a loading file.\n"); + return -1; + } + + if (params.method != 0 && arg[params.method][0] != 'g' && arg[params.method][0] != 'p' && arg[params.method][0] != 'o') { + printhelp(argc, arg); + fprintf(stderr, "Error: you must choose a calculation method beetween [p]robability, [g]radient, [o]nline.\n"); + return -1; + } + + if (params.debug) DEBUGON; + RAPIDLOADON; + SETMAXBUFSIZE(params.maxbufsize); + + signal(SIGINT, termhandler); + if (params.ppid != NULL) { + signal(SIGALRM, pidhandler); + alarm(5); + } else { + signal(SIGALRM, handler); + alarm(params.timeout); + } + if (params.online) { + MyManager.manager = simpleBDDinit(0); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(1, 0); + bdd = OnlineGenerateBDD(MyManager.manager, &MyManager.varmap); + ivarcnt = GetVarCount(MyManager.manager); + } else { + fileheader = ReadFileHeader(arg[params.loadfile]); + switch(fileheader.filetype) { + case BDDFILE_SCRIPT: + if (params.inputfile == -1) { + printhelp(argc, arg); + fprintf(stderr, "Error: an input file is necessary for this type of loading file.\n"); + return -1; + } + MyManager.manager = simpleBDDinit(fileheader.varcnt); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(fileheader.varcnt, fileheader.varstart); + bdd = FileGenerateBDD(MyManager.manager, MyManager.varmap, fileheader); + ivarcnt = fileheader.varcnt; + break; + case BDDFILE_NODEDUMP: + if (params.inputfile == -1) { + printhelp(argc, arg); + fprintf(stderr, "Error: an input file is necessary for this type of loading file.\n"); + return -1; + } 
+ MyManager.manager = simpleBDDinit(fileheader.varcnt); + MyManager.t = HIGH(MyManager.manager); + MyManager.f = LOW(MyManager.manager); + MyManager.varmap = InitNamedVars(fileheader.varcnt, fileheader.varstart); + bdd = LoadNodeDump(MyManager.manager, MyManager.varmap, fileheader.inputfile); + ivarcnt = fileheader.varcnt; + break; + default: + fprintf(stderr, "Error: not a valid file format to load.\n"); + return -1; + break; + } + } + alarm(0); + + // problem specifics + + if (bdd != NULL) { + ivarcnt = RepairVarcnt(&MyManager.varmap); + code = 0; + if (params.inputfile != -1) { + if (LoadVariableData(MyManager.varmap, arg[params.inputfile]) == -1) return -1; + if (!all_loaded(MyManager.varmap, 1)) return -1; + } + MyManager.his = InitHistory(ivarcnt); + if (params.method != 0) { + switch(arg[params.method][0]) { + case 'g': + for (i = 0; i < MyManager.varmap.varcnt; i++) { + if (MyManager.varmap.vars[i] != NULL) { + varpattern = extractpattern(MyManager.varmap.vars[i]); + if ((varpattern == NULL) || (!patterncalculated(varpattern, MyManager, i))) { + tvalue = CalcGradient(MyManager, bdd, i + MyManager.varmap.varstart, varpattern); + probability = tvalue.probability; + double factor = sigmoid(MyManager.varmap.dvalue[i], params.sigmoid_slope) * (1 - sigmoid(MyManager.varmap.dvalue[i], params.sigmoid_slope)) * params.sigmoid_slope; + if (varpattern == NULL) { + printf("query_gradient(%s,%s,%1.12f).\n", arg[params.queryid], MyManager.varmap.vars[i], tvalue.gradient * factor); + } else { + varpattern[strlen(varpattern) - 2] = '\0'; + printf("query_gradient(%s,%s,%1.12f).\n", arg[params.queryid], varpattern, tvalue.gradient * factor); + } + ReInitHistory(MyManager.his, MyManager.varmap.varcnt); + } + if (varpattern != NULL) free(varpattern); + } else { + fprintf(stderr, "Error: no variable name given for parameter.\n"); + } + } + if (probability < 0.0) { + // no nodes, so we have to calculate probability ourself + tvalue = CalcGradient(MyManager, bdd, 0 + MyManager.varmap.varstart, NULL); + probability = tvalue.probability; + } + printf("query_probability(%s,%1.12f).\n", arg[params.queryid], probability); + break; + case 'p': + printf("probability(%1.12f).\n", CalcProbability(MyManager, bdd)); + break; + case 'o': + onlinetraverse(MyManager.manager, MyManager.varmap, MyManager.his, bdd); + break; + default: + myexpand(MyManager, bdd); + break; + } + } else { + myexpand(MyManager, bdd); + } + if (params.savedfile > -1) SaveNodeDump(MyManager.manager, MyManager.varmap, bdd, arg[params.savedfile]); + if (params.exportfile > -1) simpleNamedBDDtoDot(MyManager.manager, MyManager.varmap, bdd, arg[params.exportfile]); + ReInitHistory(MyManager.his, MyManager.varmap.varcnt); + free(MyManager.his); + } + if (MyManager.manager != NULL) { + KillBDD(MyManager.manager); + free(MyManager.varmap.dvalue); + free(MyManager.varmap.ivalue); + free(MyManager.varmap.dynvalue); + for (i = 0; i < MyManager.varmap.varcnt; i++) + free(MyManager.varmap.vars[i]); + free(MyManager.varmap.vars); + } + if (params.error != NULL) free(params.error); + + return code; + +} + +/* Shell Parameters handling */ + +int argtype(const char *arg) { + if (strcmp(arg, "-l") == 0 || strcmp(arg, "--load") == 0) return 0; + if (strcmp(arg, "-e") == 0 || strcmp(arg, "--export") == 0) return 2; + if (strcmp(arg, "-m") == 0 || strcmp(arg, "--method") == 0) return 3; + if (strcmp(arg, "-i") == 0 || strcmp(arg, "--input") == 0) return 4; + if (strcmp(arg, "-h") == 0 || strcmp(arg, "--help") == 0) return 5; + if (strcmp(arg, "-d") == 0 || 
strcmp(arg, "--debug") == 0) return 6; + if (strcmp(arg, "-id") == 0 || strcmp(arg, "--queryid") == 0) return 7; + if (strcmp(arg, "-t") == 0 || strcmp(arg, "--timeout") == 0) return 8; + if (strcmp(arg, "-sd") == 0 || strcmp(arg, "--savedump") == 0) return 9; + if (strcmp(arg, "-sl") == 0 || strcmp(arg, "--slope") == 0) return 10; + if (strcmp(arg, "-o") == 0 || strcmp(arg, "--online") == 0) return 11; + if (strcmp(arg, "-bs") == 0 || strcmp(arg, "--bufsize") == 0) return 12; + if (strcmp(arg, "-pid") == 0 || strcmp(arg, "--pid") == 0) return 13; + return -1; +} + +void printhelp(int argc, char **arg) { + fprintf(stderr, "\nUsage: %s -l [filename] -i [filename] -o (-s(d) [filename] -e [filename] -m [method] -id [queryid] -sl [double]) (-t [seconds] -d -h)\n", arg[0]); + fprintf(stderr, "Generates and traverses a BDD\nMandatory parameters:\n"); + fprintf(stderr, "\t-l [filename]\t->\tfilename to load supports two formats:\n\t\t\t\t\t\t1. script with generation instructions\n\t\t\t\t\t\t2. node dump saved file\n"); + fprintf(stderr, "\t-i [filename]\t->\tfilename to input problem specifics (mandatory with file formats 1, 2)\n"); + fprintf(stderr, "\t-o\t\t->\tgenerates the BDD in online mode instead from a file can be used instead of -l\n"); + fprintf(stderr, "Optional parameters:\n"); + fprintf(stderr, "\t-sd [filename]\t->\tfilename to save generated BDD in node dump format (fast loading, traverse valid only)\n"); + fprintf(stderr, "\t-e [filename]\t->\tfilename to export generated BDD in dot format\n"); + fprintf(stderr, "\t-m [method]\t->\tthe calculation method to be used: none(default), [p]robability, [g]radient, [o]nline\n"); + fprintf(stderr, "\t-id [queryid]\t->\tthe queries identity name (used by gradient) default: %s\n", arg[0]); + fprintf(stderr, "\t-sl [double]\t->\tthe sigmoid slope (used by gradient) default: 1.0\n"); + fprintf(stderr, "Extra parameters:\n"); + fprintf(stderr, "\t-t [seconds]\t->\tthe seconds (int) for BDD generation timeout default 0 = no timeout\n"); + fprintf(stderr, "\t-pid [pid]\t->\ta process id (int) to check for termination default 0 = no process to check works only under POSIX OS\n"); + fprintf(stderr, "\t-bs [bytes]\t->\tthe bytes (int) to use as a maximum buffer size to read files default 0 = no max\n"); + fprintf(stderr, "\t-d\t\t->\tRun in debug mode (gives extra messages in stderr)\n"); + fprintf(stderr, "\t-h\t\t->\tHelp (displays this message)\n\n"); + fprintf(stderr, "Example: %s -l testbdd -i input.txt -m g -id testbdd\n", arg[0]); +} + +parameters loadparam(int argc, char **arg) { + int i; + parameters params; + params.loadfile = -1; + params.savedfile = -1; + params.exportfile = -1; + params.method = 0; + params.inputfile = -1; + params.debug = 0; + params.errorcnt = 0; + params.queryid = 0; + params.timeout = 0; + params.sigmoid_slope = 1.0; + params.online = 0; + params.maxbufsize = 0; + params.ppid = NULL; + params.error = (int *) malloc(argc * sizeof(int)); + for (i = 1; i < argc; i++) { + switch(argtype(arg[i])) { + case 0: + if (argc > i + 1) { + i++; + params.loadfile = i; + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 2: + if (argc > i + 1) { + i++; + params.exportfile = i; + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 3: + if (argc > i + 1) { + i++; + params.method = i; + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 4: + if (argc > i + 1) { + i++; + params.inputfile = i; + } else { + 
params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 5: + printhelp(argc, arg); + break; + case 6: + params.debug = 1; + break; + case 7: + if (argc > i + 1) { + i++; + params.queryid = i; + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 8: + if ((argc > i + 1) && (IsPosNumber(arg[i + 1]))) { + i++; + params.timeout = atoi(arg[i]); + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 9: + if (argc > i + 1) { + i++; + params.savedfile = i; + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 10: + if ((argc > i + 1) && (IsRealNumber(arg[i + 1]))) { + i++; + params.sigmoid_slope = atof(arg[i]); + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 11: + params.online = 1; + break; + case 12: + if ((argc > i + 1) && (IsPosNumber(arg[i + 1]))) { + i++; + params.maxbufsize = atoi(arg[i]); + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + case 13: + if ((argc > i + 1) && (IsPosNumber(arg[i + 1]))) { + i++; + params.ppid = (char *) malloc(sizeof(char) * (strlen(arg[i]) + 1)); + strcpy(params.ppid, arg[i]); + } else { + params.error[params.errorcnt] = i; + params.errorcnt++; + } + break; + default: + params.error[params.errorcnt] = i; + params.errorcnt++; + break; + } + } + return params; +} + +/* Error Handlers */ + +void handler(int num) { + fprintf(stderr, "Error: Timeout %i exceeded.\n", params.timeout); + exit(-1); +} + +void pidhandler(int num) { + char *s; + if (params.timeout > 0) { + params.timeout -= 5; + if (params.timeout <= 0) { + fprintf(stderr, "Error: Timeout exceeded.\n"); + exit(-1); + } + } + s = (char *) malloc(sizeof(char) * (19 + strlen(params.ppid))); + strcpy(s, "ps "); strcat(s, params.ppid); strcat(s, " >/dev/null"); + if (system(s) != 0) exit(4); + signal(SIGALRM, pidhandler); + alarm(5); + free(s); +} + +void termhandler(int num) { + exit(3); +} + +/* General Functions */ + +double sigmoid(double x, double slope) { + return 1 / (1 + exp(-x * slope)); +} + +/* Debugging traverse function */ + +void myexpand(extmanager MyManager, DdNode *Current) { + DdNode *h, *l; + hisnode *Found; + char *curnode; + curnode = GetNodeVarNameDisp(MyManager.manager, MyManager.varmap, Current); + printf("%s\n", curnode); + if ((Current != MyManager.t) && (Current != MyManager.f) && + ((Found = GetNode(MyManager.his, MyManager.varmap.varstart, Current)) == NULL)) { + l = LowNodeOf(MyManager.manager, Current); + h = HighNodeOf(MyManager.manager, Current); + printf("l(%s)->", curnode); + myexpand(MyManager, l); + printf("h(%s)->", curnode); + myexpand(MyManager, h); + AddNode(MyManager.his, MyManager.varmap.varstart, Current, 0.0, 0, NULL); + } +} + +/* Angelikas Algorithm */ + +double CalcProbability(extmanager MyManager, DdNode *Current) { + DdNode *h, *l; + hisnode *Found; + char *curnode; + double lvalue, hvalue, tvalue; + if (params.debug) { + curnode = GetNodeVarNameDisp(MyManager.manager, MyManager.varmap, Current); + fprintf(stderr, "%s\n", curnode); + } + if (Current == MyManager.t) return 1.0; + if (Current == MyManager.f) return 0.0; + if ((Found = GetNode(MyManager.his, MyManager.varmap.varstart, Current)) != NULL) return Found->dvalue; + l = LowNodeOf(MyManager.manager, Current); + h = HighNodeOf(MyManager.manager, Current); + if (params.debug) fprintf(stderr, "l(%s)->", curnode); + lvalue = CalcProbability(MyManager, l); + if (params.debug) fprintf(stderr, "h(%s)->", curnode); + 
hvalue = CalcProbability(MyManager, h); + tvalue = MyManager.varmap.dvalue[GetIndex(Current) - MyManager.varmap.varstart]; + tvalue = tvalue * hvalue + lvalue * (1.0 - tvalue); + AddNode(MyManager.his, MyManager.varmap.varstart, Current, tvalue, 0, NULL); + return tvalue; +} + +/* Bernds Algorithm */ + +gradientpair CalcGradient(extmanager MyManager, DdNode *Current, int TargetVar, char *TargetPattern) { + DdNode *h, *l; + hisnode *Found; + char *curnode; + gradientpair lvalue, hvalue, tvalue; + double this_probability; + double *gradient; + if (params.debug) { + curnode = GetNodeVarNameDisp(MyManager.manager, MyManager.varmap, Current); + fprintf(stderr, "%s\n", curnode); + } + if (Current == MyManager.t) { + tvalue.probability = 1.0; + tvalue.gradient = 0.0; + return tvalue; + } + if (Current == MyManager.f) { + tvalue.probability = 0.0; + tvalue.gradient = 0.0; + return tvalue; + } + if ((Found = GetNode(MyManager.his, MyManager.varmap.varstart, Current)) != NULL) { + tvalue.probability = Found->dvalue; + tvalue.gradient = *((double *) Found->dynvalue); + return tvalue; + } + l = LowNodeOf(MyManager.manager, Current); + h = HighNodeOf(MyManager.manager, Current); + if (params.debug) fprintf(stderr, "l(%s)->", curnode); + lvalue = CalcGradient(MyManager, l, TargetVar, TargetPattern); + if (params.debug) fprintf(stderr, "h(%s)->", curnode); + hvalue = CalcGradient(MyManager, h, TargetVar, TargetPattern); + this_probability = sigmoid(MyManager.varmap.dvalue[GetIndex(Current) - MyManager.varmap.varstart], params.sigmoid_slope); + tvalue.probability = this_probability * hvalue.probability + (1 - this_probability) * lvalue.probability; + tvalue.gradient = this_probability * hvalue.gradient + (1 - this_probability) * lvalue.gradient; + if ((GetIndex(Current) == TargetVar) || + ((TargetPattern != NULL) && patternmatch(TargetPattern, MyManager.varmap.vars[GetIndex(Current)]))) { + tvalue.gradient += hvalue.probability - lvalue.probability; + } + gradient = (double *) malloc(sizeof(double)); + *gradient = tvalue.gradient; + AddNode(MyManager.his, MyManager.varmap.varstart, Current, tvalue.probability, 0, gradient); + return tvalue; +} + +char * extractpattern(char *thestr) { + char *p; + int i = 0, sl = strlen(thestr); + while((thestr[i] != '_') && (i < sl)) i++; + if (i == sl) return NULL; + i++; + p = (char *) malloc(sizeof(char) * (i + 2)); + strncpy(p, thestr, i); + p[i] = '*'; + p[i + 1] = '\0'; + return p; +} + +int patterncalculated(char *pattern, extmanager MyManager, int loc) { + int i; + if (pattern == NULL) return 0; + for (i = loc - 1; i > -1; i--) + if (patternmatch(pattern, MyManager.varmap.vars[i])) return 1; + return 0; +} diff --git a/ProbLog/simplecudd/SimpleCUDD.pl b/ProbLog/simplecudd/SimpleCUDD.pl new file mode 100644 index 000000000..9fe1ecfb0 --- /dev/null +++ b/ProbLog/simplecudd/SimpleCUDD.pl @@ -0,0 +1,141 @@ +:-use_module(library(system)). +%:-use_module(library(clib)). + +bdd_init(FDO, FDI, PID):- + exec('/home/theo/BDDs/SimpleCUDD/Version4/Example -online', [pipe(FDO), pipe(FDI), std], PID). + %process_create('/home/theo/BDDs/SimpleCUDD/Version3/Example', ['-online'], [stdin(pipe(FDI)), stdout(pipe(FDO)), process(PID)]). + +bdd_commit(FDO, LINE):- + write(FDO, LINE), + write(FDO, '\n'). + +bdd_kill(FDO, FDI, PID, S):- + bdd_commit(FDO, '@e'), + wait(PID, S), + %process_wait(PID, S), + close(FDO), + close(FDI). 
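+
+% Summary of the pipe protocol used by the predicates in this file.
+% This is a sketch inferred from the predicate definitions below and above;
+% the authoritative command set is whatever the ProblogBDD/Example binary
+% accepts in its '-online' mode.
+%   @e        - terminate the BDD process (sent by bdd_kill/4)
+%   @n        - advance the traversal depth-first (bdd_nextDFS/1)
+%   @n,BFS    - advance the traversal breadth-first (bdd_nextBFS/1)
+%   @c        - ask for the current node N and counter Qcnt (bdd_current/4)
+%   @h / @l   - ask for the high/low child of the current node
+%   @v,Node   - ask for the stored values of Node (bdd_nodevaluesof/4)
+% Formula lines are built as 'L<N>=<expr>', where <expr> joins variables
+% with '*' (and), '+' (or), '#' (xor), '~*' (nand), '~+' (nor), '~#' (xnor)
+% and '~' (not), as done by bdd_line/4 and the bdd_AND/bdd_OR/... wrappers
+% that follow.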
+ +bdd_line([], X, _, L):- + atomic(X), + X \= [], + (bdd_curinter(N) -> + retract(bdd_curinter(N)) + ; + N = 1 + ), + M is N + 1, + assert(bdd_curinter(M)), + atomic_concat(['L', N, '=', X], L). + +bdd_line(L, X, O, NL):- + atomic(X), + X \= [], + atom(L), + L \= [], + atomic_concat([L, O, X], NL). + +bdd_line(L, [], _, L):-!. + +bdd_line(L, [X|T], O, R):- + bdd_line(L, X, O, NL), + bdd_line(NL, T, O, R). + +bdd_AND(L, X, NL):- + bdd_line(L, X, '*', NL). +bdd_OR(L, X, NL):- + bdd_line(L, X, '+', NL). +bdd_XOR(L, X, NL):- + bdd_line(L, X, '#', NL). +bdd_NAND(L, X, NL):- + bdd_line(L, X, '~*', NL). +bdd_NOR(L, X, NL):- + bdd_line(L, X, '~+', NL). +bdd_XNOR(L, X, NL):- + bdd_line(L, X, '~#', NL). + +bdd_not(X, NX):- + atomic(X), + atomic_concat(['~', X], NX). + +bdd_laststep(L):- + bdd_curinter(N), + M is N - 1, + atomic_concat(['L', M], L), + !. + +bdd_nextDFS(FDO):- + bdd_commit(FDO, '@n'). + +bdd_nextBFS(FDO):- + bdd_commit(FDO, '@n,BFS'). + +bdd_current(FDO, FDI, N, Qcnt):- + bdd_commit(FDO, '@c'), + read(FDI, F), + assert(F), + bdd_temp_value(N, Qcnt), + retract(F). + +bdd_highnodeof(FDO, FDI, H):- + bdd_commit(FDO, '@h'), + read(FDI, F), + assert(F), + bdd_temp_value(H), + retract(F). + +bdd_lownodeof(FDO, FDI, L):- + bdd_commit(FDO, '@l'), + read(FDI, F), + assert(F), + bdd_temp_value(L), + retract(F). + +bdd_nodevaluesof(FDO, FDI, N, V):- + atomic_concat(['@v,', N], Q), + bdd_commit(FDO, Q), + read(FDI, F), + assert(F), + bdd_temp_value(V), + retract(F). +/* +bdd_addnodetohis(FDO, N, [D, I, Dyn]):- + atomic_concat(['@a,', N, ',', D, ',', I, ',', Dyn], Q), + bdd_commit(FDO, Q). + +bdd_getnodefromhis(FDO, FDI, N, V):- + atomic_concat(['@g,', N], Q), + bdd_commit(FDO, Q), + read(FDI, F), + assert(F), + bdd_temp_value(V), + retract(F). +*/ + +runme:- + bdd_init(FDO, FDI, PID), + bdd_AND([], ['A', 'B', 'C', 'D', 'E'], L1), + bdd_laststep(L1S), + bdd_commit(FDO, L1), + bdd_AND([], ['A', 'F', 'G', '~B'], L2), + bdd_laststep(L2S), + bdd_commit(FDO, L2), + bdd_AND([], ['A', 'F', 'G', '~C'], L3), + bdd_laststep(L3S), + bdd_commit(FDO, L3), + bdd_OR([], [L1S, L2S, L3S], L4), + bdd_laststep(L4S), + bdd_commit(FDO, L4), + bdd_commit(FDO, L4S), + + repeat, + bdd_current(FDO, FDI, N, I), + write(1),nl, + bdd_nodevaluesof(FDO, FDI, N, V), + write(N), write(' ('), write(V), write(')'), nl, + bdd_next(FDO), + I = 0, (N = 'TRUE' ; N = 'FALSE'), + + bdd_kill(FDO, FDI, PID, S), + write('BDD terminated with state: '), write(S), nl. + diff --git a/ProbLog/simplecudd/general.c b/ProbLog/simplecudd/general.c new file mode 100644 index 000000000..64a7f88e1 --- /dev/null +++ b/ProbLog/simplecudd/general.c @@ -0,0 +1,234 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. 
Mantadelis and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis * +* File: general.c * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. * +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. (You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. 
You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
* +* * +* The End * +* * +\******************************************************************************/ + + +#include "general.h" + +/* Number Handling */ + +int IsRealNumber(char *c) { + int i, l; + l = strlen(c); + if (l <= 0) return 0; + if (l == 1) return IsNumberDigit(c[0]); + for(i = 1; i < strlen(c); i++) { + if (c[i] == '.') return IsPosNumber(&c[i + 1]); + if (!IsNumberDigit(c[i])) return 0; + } + return (IsNumberDigit(c[0]) || IsSignDigit(c[0])); +} + +int IsPosNumber(const char *c) { + int i, l; + l = strlen(c); + if (l <= 0) return 0; + for(i = 0; i < strlen(c); i++) { + if (!IsNumberDigit(c[i])) return 0; + } + return 1; +} + +int IsNumber(const char *c) { + int i, l; + l = strlen(c); + if (l <= 0) return 0; + if (l == 1) return IsNumberDigit(c[0]); + for(i = 1; i < strlen(c); i++) { + if (!IsNumberDigit(c[i])) return 0; + } + return (IsNumberDigit(c[0]) || IsSignDigit(c[0])); +} + +/* File Handling */ + +char * freadstr(FILE *fd, const char *separators) { + char *str; + int buf, icur = 0, max = 10; + str = (char *) malloc(sizeof(char) * max); + str[0] = '\0'; + do { + if ((buf = fgetc(fd)) != EOF) { + if (icur == (max - 1)) { + max = max * 2; + str = (char *) realloc(str, sizeof(char) * max); + } + if (!CharIn((char) buf, separators)) { + str[icur] = (char) buf; + icur++; + str[icur] = '\0'; + } + } + } while(!CharIn(buf, separators) && !feof(fd)); + return str; +} + +int CharIn(const char c, const char *in) { + int i; + for (i = 0; i < strlen(in); i++) + if (c == in[i]) return 1; + return 0; +} + +/* string handling */ + +int patternmatch(char *pattern, char *thestr) { + int i, j = -1, pl = strlen(pattern), sl = strlen(thestr); + for(i = 0; i < pl; i++) { + if (pattern[i] == '*') { + do { + i++; + if (i == pl) return 1; + } while(pattern[i] == '*'); + do { + j++; + if (j >= sl) return 0; + if ((thestr[j] == pattern[i]) && patternmatch(pattern + i, thestr + j)) return 1; + } while(1); + } else { + j++; + if (j >= sl) return 0; + if (pattern[i] != thestr[j]) return 0; + } + } + return (pl == sl); +} diff --git a/ProbLog/simplecudd/general.h b/ProbLog/simplecudd/general.h new file mode 100644 index 000000000..4441ed623 --- /dev/null +++ b/ProbLog/simplecudd/general.h @@ -0,0 +1,159 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. Mantadelis and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis * +* File: general.h * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. 
* +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. (You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. 
You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. * +* * +* The End * +* * +\******************************************************************************/ + + +#include +#include +#include + +#define IsNumberDigit(c) ('0' <= c && c <= '9') +#define IsSignDigit(c) (c == '+' || c == '-') +#define isOperator(x) (x == '+' || x == '*' || x == '#' || x == '=') +#define freadline(fd) freadstr(fd, "\n"); + +int IsRealNumber(char *c); +int IsPosNumber(const char *c); +int IsNumber(const char *c); +char * freadstr(FILE *fd, const char *separators); +int CharIn(const char c, const char *in); +int patternmatch(char *pattern, char *thestr); diff --git a/ProbLog/simplecudd/simplecudd.c b/ProbLog/simplecudd/simplecudd.c new file mode 100644 index 000000000..6ee290377 --- /dev/null +++ b/ProbLog/simplecudd/simplecudd.c @@ -0,0 +1,1620 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. 
Mantadelis and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis * +* File: simplecudd.c * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. * +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. (You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. 
You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
* +* * +* The End * +* * +\******************************************************************************/ + + +#include "simplecudd.h" + +/* BDD manager initialization */ + +int _debug = 0; +int _RapidLoad = 0; +int _maxbufsize = 0; + +DdManager* simpleBDDinit(int varcnt) { + DdManager *temp; + temp = Cudd_Init(varcnt, 0, CUDD_UNIQUE_SLOTS, CUDD_CACHE_SLOTS, 0); + Cudd_AutodynEnable(temp, CUDD_REORDER_GROUP_SIFT); + Cudd_SetMaxCacheHard(temp, 1024*1024*1024); + Cudd_SetLooseUpTo(temp, 1024*1024*512); + if (_debug) Cudd_EnableReorderingReporting(temp); + return temp; +} + +/* BDD tree travesrsing */ + +DdNode* HighNodeOf(DdManager *manager, DdNode *node) { + DdNode *tmp; + if (IsHigh(manager, node)) return HIGH(manager); + if (IsLow(manager, node)) return LOW(manager); + tmp = Cudd_Regular(node); + if (Cudd_IsComplement(node)) return NOT(tmp->type.kids.T); + return tmp->type.kids.T; +} + +DdNode* LowNodeOf(DdManager *manager, DdNode *node) { + DdNode *tmp; + if (IsHigh(manager, node)) return HIGH(manager); + if (IsLow(manager, node)) return LOW(manager); + tmp = Cudd_Regular(node); + if (Cudd_IsComplement(node)) return NOT(tmp->type.kids.E); + return tmp->type.kids.E; +} + +/* BDD tree generation */ + +DdNode* D_BDDAnd(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddAnd(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +DdNode* D_BDDNand(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddNand(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +DdNode* D_BDDOr(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddOr(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +DdNode* D_BDDNor(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddNor(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +DdNode* D_BDDXor(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddXor(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +DdNode* D_BDDXnor(DdManager *manager, DdNode *bdd1, DdNode *bdd2) { + DdNode *tmp; + tmp = Cudd_bddXnor(manager, bdd1, bdd2); + Cudd_Ref(tmp); + Cudd_RecursiveDeref(manager, bdd2); + return tmp; +} + +/* file manipulation */ + +bddfileheader ReadFileHeader(char *filename) { + bddfileheader temp; + char *header; + temp.inputfile = NULL; + temp.version = 0; + temp.varcnt = 0; + temp.varstart = 0; + temp.intercnt = 0; + temp.filetype = BDDFILE_OTHER; + if ((temp.inputfile = fopen(filename, "r")) == NULL) { + perror(filename); + temp.filetype = BDDFILE_ERROR; + return temp; + } + // Read file header + if (!feof(temp.inputfile)) { + header = freadline(temp.inputfile); + temp.version = CheckFileVersion(header); + if (temp.version > -1) temp.filetype = (strlen(header) == 5) * BDDFILE_SCRIPT + (strlen(header) == 7) * BDDFILE_NODEDUMP; + free(header); + switch (temp.filetype) { + case BDDFILE_SCRIPT: + switch (temp.version) { + case 1: + fscanf(temp.inputfile, "%i\n", &temp.varcnt); + fscanf(temp.inputfile, "%i\n", &temp.varstart); + fscanf(temp.inputfile, "%i\n", &temp.intercnt); + break; + default: + fclose(temp.inputfile); + temp.inputfile = NULL; + break; + } + break; + case BDDFILE_NODEDUMP: + switch (temp.version) { + case 1: + fscanf(temp.inputfile, "%i\n", &temp.varcnt); + fscanf(temp.inputfile, "%i\n", 
&temp.varstart); + break; + default: + fclose(temp.inputfile); + temp.inputfile = NULL; + break; + } + break; + case BDDFILE_OTHER: + fclose(temp.inputfile); + temp.inputfile = NULL; + break; + default: + fclose(temp.inputfile); + temp.inputfile = NULL; + break; + } + } + return temp; +} + +int CheckFileVersion(const char *version) { + if (strlen(version) < 5) return -1; + if (strlen(version) == 5 && version[0] == '@' && version[1] == 'B' && version[2] == 'D' && version[3] == 'D') return atoi(version + 4); + if (strlen(version) == 7 && version[0] == '@' && version[1] == 'N' && version[2] == 'O' && version[3] == 'D' + && version[4] == 'E' && version[5] == 'S') return atoi(version + 6); + return -1; +} + +int simpleBDDtoDot(DdManager *manager, DdNode *bdd, char *filename) { + DdNode *f[1]; + int ret; + FILE *fd; + f[0] = Cudd_BddToAdd(manager, bdd); + fd = fopen(filename, "w"); + if (fd == NULL) { + perror(filename); + return -1; + } + ret = Cudd_DumpDot(manager, 1, f, NULL, NULL, fd); + fclose(fd); + return ret; +} + +int simpleNamedBDDtoDot(DdManager *manager, namedvars varmap, DdNode *bdd, char *filename) { + DdNode *f[1]; + int ret; + FILE *fd; + f[0] = Cudd_BddToAdd(manager, bdd); + fd = fopen(filename, "w"); + if (fd == NULL) { + perror(filename); + return -1; + } + ret = Cudd_DumpDot(manager, 1, f, varmap.vars, NULL, fd); + fclose(fd); + return ret; +} + +int SaveNodeDump(DdManager *manager, namedvars varmap, DdNode *bdd, char *filename) { + hisqueue *Nodes; + FILE *outputfile; + int i; + if ((outputfile = fopen(filename, "w")) == NULL) { + perror(filename); + return -1; + } + fprintf(outputfile, "%s\n%i\n%i\n", "@NODES1", varmap.varcnt, varmap.varstart); + Nodes = InitHistory(varmap.varcnt); + for (i = 0; i < varmap.varcnt; i++) + fprintf(outputfile, "%s\t%i\n", varmap.vars[i], Cudd_ReadPerm(manager, i)); + if (bdd == HIGH(manager)) fprintf(outputfile, "TRUE\t0\tTRUE\t0\tTRUE\t0\n"); + else if (bdd == LOW(manager)) fprintf(outputfile, "FALSE\t0\tFALSE\t0\tFALSE\t0\n"); + else SaveExpand(manager, varmap, Nodes, bdd, outputfile); + ReInitHistory(Nodes, varmap.varcnt); + free(Nodes); + fclose(outputfile); + return 0; +} + +void SaveExpand(DdManager *manager, namedvars varmap, hisqueue *Nodes, DdNode *Current, FILE *outputfile) { + DdNode *h, *l; + hisnode *Found; + char *curnode; + int inode; + if (Current != HIGH(manager) && Current != LOW(manager)) { + if ((Found = GetNode(Nodes, varmap.varstart, Current)) == NULL) { + AddNode(Nodes, varmap.varstart, Current, 0.0, 0, NULL); + Found = GetNode(Nodes, varmap.varstart, Current); + } + if (!(Found->ivalue)) { + Found->ivalue = 1; + curnode = GetNodeVarNameDisp(manager, varmap, Current); + inode = GetNodeIndex(Nodes, varmap.varstart, Current); + fprintf(outputfile, "%s\t%i\t", curnode, inode); + h = HighNodeOf(manager, Current); + if (h == HIGH(manager)) { + fprintf(outputfile, "TRUE\t0\t"); + } else if (h == LOW(manager)) { + fprintf(outputfile, "FALSE\t0\t"); + } else { + if (GetNode(Nodes, varmap.varstart, h) == NULL) AddNode(Nodes, varmap.varstart, h, 0.0, 0, NULL); + curnode = GetNodeVarNameDisp(manager, varmap, h); + inode = GetNodeIndex(Nodes, varmap.varstart, h); + fprintf(outputfile, "%s\t%i\t", curnode, inode); + } + l = LowNodeOf(manager, Current); + if (l == HIGH(manager)) { + fprintf(outputfile, "TRUE\t0\n"); + } else if (l == LOW(manager)) { + fprintf(outputfile, "FALSE\t0\n"); + } else { + if (GetNode(Nodes, varmap.varstart, l) == NULL) AddNode(Nodes, varmap.varstart, l, 0.0, 0, NULL); + curnode = GetNodeVarNameDisp(manager, 
varmap, l); + inode = GetNodeIndex(Nodes, varmap.varstart, l); + fprintf(outputfile, "%s\t%i\n", curnode, inode); + } + SaveExpand(manager, varmap, Nodes, l, outputfile); + SaveExpand(manager, varmap, Nodes, h, outputfile); + } + } +} + +DdNode * LoadNodeDump(DdManager *manager, namedvars varmap, FILE *inputfile) { + hisqueue *Nodes; + nodeline temp; + DdNode *ret; + int i, pos, *perm; + char *varnam; + perm = (int *) malloc(sizeof(int) * varmap.varcnt); + Nodes = InitHistory(varmap.varcnt); + for (i = 0; i < varmap.varcnt; i++) { + varnam = freadstr(inputfile, "\t"); + pos = atoi(freadstr(inputfile, "\n")); + AddNamedVarAt(varmap, varnam, pos); + perm[pos] = pos; + } + temp.varname = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.nodenum); + temp.truevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.truenode); + temp.falsevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\n", &temp.falsenode); + ret = LoadNodeRec(manager, varmap, Nodes, inputfile, temp); + free(temp.varname); + free(temp.truevar); + free(temp.falsevar); + fclose(inputfile); + ReInitHistory(Nodes, varmap.varcnt); + free(Nodes); + Cudd_Ref(ret); + Cudd_ShuffleHeap(manager, perm); + for (i = 0; i < varmap.varcnt; i++) varmap.ivalue[i] = 0; + return ret; +} + +DdNode * LoadNodeRec(DdManager *manager, namedvars varmap, hisqueue *Nodes, FILE *inputfile, nodeline current) { + nodeline temp; + DdNode *newnode, *truenode, *falsenode; + int index; + newnode = GetIfExists(manager, varmap, Nodes, current.varname, current.nodenum); + if (newnode != NULL) return newnode; + falsenode = GetIfExists(manager, varmap, Nodes, current.falsevar, current.falsenode); + if (falsenode == NULL) { + temp.varname = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.nodenum); + temp.truevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.truenode); + temp.falsevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\n", &temp.falsenode); + falsenode = LoadNodeRec(manager, varmap, Nodes, inputfile, temp); + free(temp.varname); + free(temp.truevar); + free(temp.falsevar); + } + truenode = GetIfExists(manager, varmap, Nodes, current.truevar, current.truenode); + if (truenode == NULL) { + temp.varname = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.nodenum); + temp.truevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\t", &temp.truenode); + temp.falsevar = freadstr(inputfile, "\t"); + fscanf(inputfile, "%i\n", &temp.falsenode); + truenode = LoadNodeRec(manager, varmap, Nodes, inputfile, temp); + free(temp.varname); + free(temp.truevar); + free(temp.falsevar); + } + index = GetNamedVarIndex(varmap, current.varname); + if (!varmap.ivalue[index]) { + varmap.ivalue[index] = 1; + newnode = GetVar(manager, varmap.varstart + index); + //Cudd_RecursiveDeref(manager, newnode->type.kids.T); + //Cudd_RecursiveDeref(manager, newnode->type.kids.E); + newnode->type.kids.T = Cudd_NotCond(truenode, Cudd_IsComplement(truenode)); + newnode->type.kids.E = Cudd_NotCond(falsenode, Cudd_IsComplement(truenode)); + Cudd_Ref(newnode->type.kids.T); + Cudd_Ref(newnode->type.kids.E); + Cudd_Ref(newnode); + } else { + if (_RapidLoad == 1) { + newnode = cuddAllocNode(manager); + if (newnode != NULL) { + newnode->index = varmap.varstart + index; + newnode->type.kids.T = Cudd_NotCond(truenode, Cudd_IsComplement(truenode)); + newnode->type.kids.E = Cudd_NotCond(falsenode, Cudd_IsComplement(truenode)); + Cudd_Ref(newnode->type.kids.T); + Cudd_Ref(newnode->type.kids.E); + Cudd_Ref(newnode); + } + } else { + 
newnode = cuddUniqueInter(manager, varmap.varstart + index, Cudd_NotCond(truenode, Cudd_IsComplement(truenode)), Cudd_NotCond(falsenode, Cudd_IsComplement(truenode))); + if (newnode != NULL) { + Cudd_Ref(newnode); + } else { + newnode = cuddAllocNode(manager); + if (newnode != NULL) { + newnode->index = varmap.varstart + index; + newnode->type.kids.T = Cudd_NotCond(truenode, Cudd_IsComplement(truenode)); + newnode->type.kids.E = Cudd_NotCond(falsenode, Cudd_IsComplement(truenode)); + Cudd_Ref(newnode->type.kids.T); + Cudd_Ref(newnode->type.kids.E); + Cudd_Ref(newnode); + } + } + } + } + if (newnode != NULL) { + Nodes[index].thenode[current.nodenum].key = Cudd_NotCond(newnode, Cudd_IsComplement(truenode)); + return Cudd_NotCond(newnode, Cudd_IsComplement(truenode)); + } + return NULL; +} + +DdNode * GetIfExists(DdManager *manager, namedvars varmap, hisqueue *Nodes, char *varname, int nodenum) { + int index; + if (strcmp(varname, "TRUE") == 0) return HIGH(manager); + if (strcmp(varname, "FALSE") == 0) return LOW(manager); + index = GetNamedVarIndex(varmap, varname); + if (index == -1 * varmap.varcnt) { + fprintf(stderr, "Error: more variables requested than initialized.\n"); + exit(-1); + } + if ((index < 0) || (index == 0 && varmap.vars[0] == NULL)) { + index = AddNamedVar(varmap, varname); + } + ExpandNodes(Nodes, index, nodenum); + if (Nodes[index].thenode[nodenum].key != NULL) return Nodes[index].thenode[nodenum].key; + return NULL; +} + +void ExpandNodes(hisqueue *Nodes, int index, int nodenum) { + int i; + if (Nodes[index].cnt > nodenum) return; + Nodes[index].thenode = (hisnode *) realloc(Nodes[index].thenode, (nodenum + 1) * sizeof(hisnode)); + for (i = Nodes[index].cnt; i < nodenum + 1; i++) { + Nodes[index].thenode[i].key = NULL; + Nodes[index].thenode[i].ivalue = 0; + Nodes[index].thenode[i].dvalue = 0.0; + Nodes[index].thenode[i].dynvalue = NULL; + } + Nodes[index].cnt = nodenum + 1; +} + +int LoadVariableData(namedvars varmap, char *filename) { + FILE *data; + char *dataread, buf, *varname, *dynvalue; + double dvalue = 0.0; + int icur = 0, maxbufsize = 10, hasvar = 0, index = -1, idat = 0, ivalue = 0; + dynvalue = NULL; + if ((data = fopen(filename, "r")) == NULL) { + perror("fopen"); + return -1; + } + dataread = (char *) malloc(sizeof(char) * maxbufsize); + while(!feof(data)) { + fread(&buf, 1, 1, data); + if (buf == '\n') { + dataread[icur] = '\0'; + icur = 0; + buf = ' '; + if (dataread[0] == '@') { + if (hasvar) { + for (index = 0; index < varmap.varcnt; index++) { + if (patternmatch(varname, varmap.vars[index])) { + varmap.loaded[index] = 1; + varmap.dvalue[index] = dvalue; + varmap.ivalue[index] = ivalue; + if (varmap.dynvalue[index] != NULL) { + free(varmap.dynvalue[index]); + varmap.dynvalue[index] = NULL; + } + if (dynvalue != NULL) { + varmap.dynvalue[index] = (void *) malloc(sizeof(char) * (strlen(dynvalue) + 1)); + strcpy(varmap.dynvalue[index], dynvalue); + } + } + } + dvalue = 0.0; + ivalue = 0; + if (dynvalue != NULL) { + free(dynvalue); + dynvalue = NULL; + } + free(varname); + } + varname = (char *) malloc(sizeof(char) * strlen(dataread)); + strcpy(varname, dataread + 1); + hasvar = 1; + idat = 0; + } else { + if (hasvar >= 0) { + switch(idat) { + case 0: + if (IsRealNumber(dataread)) dvalue = atof(dataread); + else { + fprintf(stderr, "Error at file: %s. 
Variable: %s can't have non real value: %s.\n", filename, varname, dataread); + fclose(data); + free(varname); + free(dataread); + return -2; + } + idat++; + break; + case 1: + if (IsNumber(dataread)) ivalue = atoi(dataread); + else { + fprintf(stderr, "Error at file: %s. Variable: %s can't have non integer value: %s.\n", filename, varname, dataread); + fclose(data); + free(varname); + free(dataread); + return -2; + } + idat++; + break; + case 2: + dynvalue = malloc(sizeof(char) * (strlen(dataread) + 1)); + strcpy(dynvalue, dataread); + break; + } + } + } + } else { + dataread[icur] = buf; + icur++; + if (icur == _maxbufsize) { + fprintf(stderr, "Error: Maximum buffer size(%i) exceeded.\n", _maxbufsize); + fclose(data); + free(varname); + free(dataread); + return -2; + } + while (icur > maxbufsize - 1) { + maxbufsize *= 2; + dataread = (char *) realloc(dataread, sizeof(char) * maxbufsize); + } + } + } + if (hasvar) { + for (index = 0; index < varmap.varcnt; index++) { + if (patternmatch(varname, varmap.vars[index])) { + varmap.loaded[index] = 1; + varmap.dvalue[index] = dvalue; + varmap.ivalue[index] = ivalue; + if (dynvalue != NULL) { + varmap.dynvalue[index] = (void *) malloc(sizeof(char) * (strlen(dynvalue) + 1)); + strcpy(varmap.dynvalue[index], dynvalue); + } + } + } + if (dynvalue != NULL) { + free(dynvalue); + dynvalue = NULL; + } + free(varname); + } + fclose(data); + free(dataread); + return 0; +} + +/* Queue for node storing to avoid loops */ + +hisqueue* InitHistory(int varcnt) { + int i; + hisqueue *HisQueue; + HisQueue = (hisqueue *) malloc(sizeof(hisqueue) * varcnt); + for (i = 0; i < varcnt; i++) { + HisQueue[i].thenode = NULL; + HisQueue[i].cnt = 0; + } + return HisQueue; +} + +void ReInitHistory(hisqueue *HisQueue, int varcnt) { + int i, j; + for (i = 0; i < varcnt; i++) { + if (HisQueue[i].thenode != NULL) { + for (j = 0; j < HisQueue[i].cnt; j++) + if (HisQueue[i].thenode[j].dynvalue != NULL) free(HisQueue[i].thenode[j].dynvalue); + free(HisQueue[i].thenode); + HisQueue[i].thenode = NULL; + } + HisQueue[i].cnt = 0; + } +} + +void AddNode(hisqueue *HisQueue, int varstart, DdNode *node, double dvalue, int ivalue, void *dynvalue) { + int index = GetIndex(node) - varstart; + HisQueue[index].thenode = (hisnode *) realloc(HisQueue[index].thenode, (HisQueue[index].cnt + 1) * sizeof(hisnode)); + HisQueue[index].thenode[HisQueue[index].cnt].key = node; + HisQueue[index].thenode[HisQueue[index].cnt].dvalue = dvalue; + HisQueue[index].thenode[HisQueue[index].cnt].ivalue = ivalue; + HisQueue[index].thenode[HisQueue[index].cnt].dynvalue = dynvalue; + HisQueue[index].cnt += 1; +} + +hisnode* GetNode(hisqueue *HisQueue, int varstart, DdNode *node) { + int i; + int index = GetIndex(node) - varstart; + for(i = 0; i < HisQueue[index].cnt; i++) { + if (HisQueue[index].thenode[i].key == node) return &(HisQueue[index].thenode[i]); + } + return NULL; +} + +int GetNodeIndex(hisqueue *HisQueue, int varstart, DdNode *node) { + int i; + int index = GetIndex(node) - varstart; + for(i = 0; i < HisQueue[index].cnt; i++) { + if (HisQueue[index].thenode[i].key == node) return i; + } + return -1; +} + +/* Named variables */ + +namedvars InitNamedVars(int varcnt, int varstart) { + namedvars temp; + int i; + temp.varcnt = varcnt; + temp.varstart = varstart; + temp.vars = (char **) malloc(sizeof(char *) * varcnt); + temp.loaded = (int *) malloc(sizeof(int) * varcnt); + temp.dvalue = (double *) malloc(sizeof(double) * varcnt); + temp.ivalue = (int *) malloc(sizeof(int) * varcnt); + temp.dynvalue = (void 
**) malloc(sizeof(int) * varcnt); + for (i = 0; i < varcnt; i++) { + temp.vars[i] = NULL; + temp.loaded[i] = 0; + temp.dvalue[i] = 0.0; + temp.ivalue[i] = 0; + temp.dynvalue[i] = NULL; + } + return temp; +} + +void EnlargeNamedVars(namedvars *varmap, int newvarcnt) { + int i; + varmap->vars = (char **) realloc(varmap->vars, sizeof(char *) * newvarcnt); + varmap->loaded = (int *) realloc(varmap->loaded, sizeof(int) * newvarcnt); + varmap->dvalue = (double *) realloc(varmap->dvalue, sizeof(double) * newvarcnt); + varmap->ivalue = (int *) realloc(varmap->ivalue, sizeof(int) * newvarcnt); + varmap->dynvalue = (void **) realloc(varmap->dynvalue, sizeof(int) * newvarcnt); + for (i = varmap->varcnt; i < newvarcnt; i++) { + varmap->vars[i] = NULL; + varmap->loaded[i] = 0; + varmap->dvalue[i] = 0.0; + varmap->ivalue[i] = 0; + varmap->dynvalue[i] = NULL; + } + varmap->varcnt = newvarcnt; +} + +int AddNamedVarAt(namedvars varmap, const char *varname, int index) { + if (varmap.varcnt > index) { + varmap.vars[index] = (char *) malloc(sizeof(char) * (strlen(varname) + 1)); + strcpy(varmap.vars[index], varname); + return index; + } + return -1; +} + +int AddNamedVar(namedvars varmap, const char *varname) { + int index = GetNamedVarIndex(varmap, varname); + if (index == -1 * varmap.varcnt) { + return -1; + } else if ((index < 0) || (index == 0 && varmap.vars[0] == NULL)) { + index *= -1; + varmap.vars[index] = (char *) malloc(sizeof(char) * (strlen(varname) + 1)); + strcpy(varmap.vars[index], varname); + } + return index; +} + +void SetNamedVarValuesAt(namedvars varmap, int index, double dvalue, int ivalue, void *dynvalue) { + varmap.dvalue[index] = dvalue; + varmap.ivalue[index] = ivalue; + varmap.dynvalue[index] = dynvalue; +} + +int SetNamedVarValues(namedvars varmap, const char *varname, double dvalue, int ivalue, void *dynvalue) { + int index = GetNamedVarIndex(varmap, varname); + if (index == -1 * varmap.varcnt) { + return -1; + } else if ((index < 0) || (index == 0 && varmap.vars[0] == NULL)) { + index *= -1; + varmap.vars[index] = (char *) malloc(sizeof(char) * (strlen(varname) + 1)); + strcpy(varmap.vars[index], varname); + varmap.dvalue[index] = dvalue; + varmap.ivalue[index] = ivalue; + varmap.dynvalue[index] = dynvalue; + } else { + varmap.dvalue[index] = dvalue; + varmap.ivalue[index] = ivalue; + varmap.dynvalue[index] = dynvalue; + } + return index; +} + +int GetNamedVarIndex(const namedvars varmap, const char *varname) { + int i; + for (i = 0; i < varmap.varcnt; i++) { + if (varmap.vars[i] == NULL) return -1 * i; + if (strcmp(varmap.vars[i], varname) == 0) return i; + } + return -1 * varmap.varcnt; +} + +char* GetNodeVarName(DdManager *manager, namedvars varmap, DdNode *node) { + if (node == NULL) return NULL; + if (node == HIGH(manager)) return "true"; + if (node == LOW(manager)) return "false"; + return varmap.vars[GetIndex(node) - varmap.varstart]; +} + +char* GetNodeVarNameDisp(DdManager *manager, namedvars varmap, DdNode *node) { + if (HIGH(manager) == node) return "TRUE"; + if (LOW(manager) == node) return "FALSE"; + if (NULL == node) return "(null)"; + return varmap.vars[GetIndex(node) - varmap.varstart]; +} + +int RepairVarcnt(namedvars *varmap) { + while (varmap->vars[varmap->varcnt - 1] == NULL) + varmap->varcnt--; + return varmap->varcnt; +} + +int all_loaded(namedvars varmap, int disp) { + int i, res = 1; + for (i = 0; i < varmap.varcnt; i++) { + if (varmap.loaded[i] == 0) { + res = 0; + if (disp) fprintf(stderr, "The variable: %s was not loaded with values.\n", varmap.vars[i]); 
else return 0; + } + } + return res; +} + +/* Parser */ + +DdNode* FileGenerateBDD(DdManager *manager, namedvars varmap, bddfileheader fileheader) { + int icomment, maxlinesize, icur, iline, curinter, iequal; + DdNode *Line, **inter; + char buf, *inputline, *filename; + bddfileheader interfileheader; + // Initialization of intermediate steps + inter = (DdNode **) malloc(sizeof(DdNode *) * fileheader.intercnt); + for (icur = 0; icur < fileheader.intercnt; icur++) inter[icur] = NULL; + // Read file data + interfileheader.inputfile = NULL; + filename = NULL; // For nested files + iequal = 0; // Flag for encountered = sign + icur = 0; // Pointer for inputline buffer location + iline = 5; // Current file line (first after header) + icomment = 0; // Flag for comments + maxlinesize = 80; // inputline starting buffer size + inputline = (char *) malloc(sizeof(char) * maxlinesize); + while(!feof(fileheader.inputfile)) { + fread(&buf, 1, 1, fileheader.inputfile); + if (buf == ';' || buf == '%' || buf == '$') icomment = 1; + if (buf == '\n') { + if (icomment) icomment = 0; + if (iequal > 1) { + fprintf(stderr, "Error at line: %i. Line contains more than 1 equal(=) signs.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } else iequal = 0; + if (icur > 0) { + inputline[icur] = '\0'; + if (inputline[0] != 'L') { + fprintf(stderr, "Error at line: %i. Intermediate results should start with L.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + curinter = getInterBDD(inputline); + if (curinter == -1) { + if (inputline[0] == 'L' && IsPosNumber(inputline + 1)) { + curinter = atoi(inputline + 1) - 1; + if (curinter > -1 && curinter < fileheader.intercnt && inter[curinter] != NULL) { + if (_debug) fprintf(stderr, "Returned: %s\n", inputline); + fclose(fileheader.inputfile); + Line = inter[curinter]; + free(inter); + free(inputline); + return Line; + } else { + fprintf(stderr, "Error at line: %i. Return result asked doesn't exist.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + } else { + fprintf(stderr, "Error at line: %i. Invalid intermediate result format.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + } else if (curinter > -1 && curinter < fileheader.intercnt && inter[curinter] == NULL) { + if (_debug) fprintf(stderr, "%i %s\n", curinter, inputline); + filename = getFileName(inputline); + if (filename == NULL) { + Line = LineParser(manager, varmap, inter, fileheader.intercnt, inputline, iline); + } else { + interfileheader = ReadFileHeader(filename); + if (interfileheader.inputfile == NULL) { + //Line = simpleBDDload(manager, &varmap, filename); + Line = NULL; + } else { + Line = FileGenerateBDD(manager, varmap, interfileheader); + } + if (Line == NULL) fprintf(stderr, "Error at line: %i. Error in nested BDD file: %s.\n", iline, filename); + free(filename); + filename = NULL; + interfileheader.inputfile = NULL; + } + if (Line == NULL) { + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + inter[curinter] = Line; + icur = 0; + } else if (curinter > -1 && curinter < fileheader.intercnt && inter[curinter] != NULL) { + fprintf(stderr, "Error at line: %i. Intermediate results can't be overwritten.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } else { + fprintf(stderr, "Error at line: %i. 
Intermediate result asked doesn't exist.\n", iline); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + } + iline++; + } else if (buf != ' ' && buf != '\t' && !icomment) { + if (buf == '=') iequal++; + inputline[icur] = buf; + icur += 1; + if (icur == _maxbufsize) { + fprintf(stderr, "Error: Maximum buffer size(%i) exceeded.\n", _maxbufsize); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; + } + while (icur > maxlinesize - 1) { + maxlinesize *= 2; + inputline = (char *) realloc(inputline, sizeof(char) * maxlinesize); + } + } + } + fprintf(stderr, "Error, file either doesn't end with a blank line or no return result was asked.\n"); + fclose(fileheader.inputfile); + free(inter); + free(inputline); + return NULL; +} + +int getInterBDD(char *function) { + int i, ret; + char *inter; + for (i = 0; i < strlen(function); i++) { + if (function[i] == '=') { + inter = (char *) malloc(sizeof(char) * i); + strncpy(inter, function + 1, i - 1); + inter[i - 1] = '\0'; + if (IsPosNumber(inter)) { + ret = atoi(inter) - 1; + free(inter); + return ret; + } else { + free(inter); + return -1; + } + } + } + return -1; +} + +char* getFileName(const char *function) { + int i = 0; + char *filename; + while(function[i] != '=' && (i + 1) < strlen(function)) i++; + if ((i + 1) < strlen(function)) { + i++; + if (function[i] == '<' && function[strlen(function) - 1] == '>') { + filename = (char *) malloc(sizeof(char) * strlen(function) - i); + strcpy(filename, function + i + 1); + filename[strlen(function) - i - 2] = '\0'; + return filename; + } + } + return NULL; +} + +DdNode* LineParser(DdManager *manager, namedvars varmap, DdNode **inter, int maxinter, char *function, int iline) { + int istart, iend, ilength, i, symbol, ivar, inegvar, inegoper, iconst; + long startAt, endAt; + double secs; + DdNode *bdd; + char *term, curoper; + bdd = HIGH(manager); + Cudd_Ref(bdd); + term = NULL; + ivar = -1; + curoper = '*'; + istart = -1; + iend = strlen(function) - 1; + ilength = -1; + symbol = -1; + inegvar = 0; + inegoper = 0; + iconst = 0; + for (i = strlen(function) - 1; i > -1; i--) { + if (symbol == -1 && isOperator(function[i])) { + symbol = i; + istart = i + 1; + ilength = iend - i; + iend = i - 1; + if (ilength > 0 && !(ilength == 1 && function[istart] == '~')) { + term = (char *) malloc(sizeof(char) * (ilength + 1)); + strncpy(term, function + istart, ilength); + term[ilength] = '\0'; + } else { + fprintf(stderr, "Line Parser Error at line: %i. An operator was encounter with no term at its right side.\n", iline); + free(term); + return NULL; + } + } + if (symbol != -1) { + if (term[0] == '~') inegvar = 1; else inegvar = 0; + if (term[0 + inegvar] != 'L') { + // Term is a variable + if (strcmp(term + inegvar, "TRUE") == 0) { + iconst = 1; + } else if (strcmp(term + inegvar, "FALSE") == 0) { + iconst = 1; + inegvar = 1; + } else { + iconst = 0; + ivar = AddNamedVar(varmap, term + inegvar); + if (ivar == -1) { + fprintf(stderr, "Line Parser Error at line: %i. 
More BDD variables than the reserved term: %s.\n", iline, term); + free(term); + return NULL; + } + } + if (_debug) fprintf(stderr, "%s\n", term); + if (_debug && !iconst) fprintf(stderr, "PNZ1:%.0f P1:%.0f S1:%i PNZ2:%.0f P2:%.0f S2:%i\n", + Cudd_CountPathsToNonZero(bdd), + Cudd_CountPath(bdd), + Cudd_DagSize(bdd), + Cudd_CountPathsToNonZero(GetVar(manager, ivar + varmap.varstart)), + Cudd_CountPath(GetVar(manager, ivar + varmap.varstart)), + Cudd_DagSize(GetVar(manager, ivar + varmap.varstart)) ); + startAt = clock(); + if (!iconst) { + if (inegvar) bdd = BDD_Operator(manager, NOT(GetVar(manager, ivar + varmap.varstart)), bdd, curoper, inegoper); + else bdd = BDD_Operator(manager, GetVar(manager, ivar + varmap.varstart), bdd, curoper, inegoper); + } else { + switch(curoper) { + case '+': + if (inegvar ^ inegoper) ; else { + bdd = HIGH(manager); + Cudd_Ref(bdd); + } + break; + case '*': + if (inegvar ^ inegoper) { + bdd = LOW(manager); + Cudd_Ref(bdd); + } + break; + case '#': + if (inegvar ^ inegoper) ; else bdd = NOT(bdd); + break; + } + } + endAt = clock(); + secs = ((double) (endAt - startAt)) / ((double) CLOCKS_PER_SEC); + if (_debug) fprintf(stderr, "term: %s of line: %i took: %i\n", term, iline, endAt - startAt); + //if ((endAt - startAt) > 10000000) Cudd_AutodynDisable(manager); + if (bdd == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. Error using operator %c on term: %s.\n", iline, curoper, term); + free(term); + return NULL; + } + } else { + // Term is an intermediate result + if (IsPosNumber(term + inegvar + 1)) ivar = atoi(term + inegvar + 1) - 1; else { + fprintf(stderr, "Line Parser Error at line: %i. Invalid intermediate result format term: %s.\n", iline, term); + free(term); + return NULL; + } + if (ivar < 0 || ivar > maxinter || inter[ivar] == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. Usage of undeclared intermediate result term: %s.\n", iline, term); + free(term); + return NULL; + } + if (_debug) fprintf(stderr, "%s\n", term); + if (_debug) fprintf(stderr, "PNZ1:%.0f P1:%.0f S1:%i PNZ2:%.0f P2:%.0f S2:%i\n", + Cudd_CountPathsToNonZero(bdd), + Cudd_CountPath(bdd), + Cudd_DagSize(bdd), + Cudd_CountPathsToNonZero(inter[ivar]), + Cudd_CountPath(inter[ivar]), + Cudd_DagSize(inter[ivar]) ); + startAt = clock(); + if (inegvar) bdd = BDD_Operator(manager, NOT(inter[ivar]), bdd, curoper, inegoper); + else bdd = BDD_Operator(manager, inter[ivar], bdd, curoper, inegoper); + endAt = clock(); + secs = ((double) (endAt - startAt)) / ((double) CLOCKS_PER_SEC); + if (_debug) fprintf(stderr, "term: %s of line: %i took: %i\n", term, iline, endAt - startAt); + //if ((endAt - startAt) > 10000000) Cudd_AutodynDisable(manager); + if (bdd == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. 
Error using operator %c on term: %s.\n", iline, curoper, term); + free(term); + return NULL; + } + } + free(term); + term = NULL; + curoper = function[symbol]; + if (curoper == '=') return bdd; + if (function[symbol - 1] == '~') { + inegoper = 1; + i--; + iend--; + } else { + inegoper = 0; + } + symbol = -1; + } + } + return NULL; +} + +DdNode* BDD_Operator(DdManager *manager, DdNode *bdd1, DdNode *bdd2, char Operator, int inegoper) { + switch (Operator) { + case '+': + if (inegoper) return D_BDDNor(manager, bdd1, bdd2); + else return D_BDDOr(manager, bdd1, bdd2); + break; + case '*': + if (inegoper) return D_BDDNand(manager, bdd1, bdd2); + else return D_BDDAnd(manager, bdd1, bdd2); + break; + case '#': + if (inegoper) return D_BDDXnor(manager, bdd1, bdd2); + else return D_BDDXor(manager, bdd1, bdd2); + break; + default: + return NULL; + break; + } +} + +DdNode* OnlineGenerateBDD(DdManager *manager, namedvars *varmap) { + int icomment, maxlinesize, icur, iline, curinter, iequal, iinters, itmp, i; + DdNode *Line, **inter; + char buf, *inputline, *filename; + bddfileheader interfileheader; + // Initialization of intermediate steps + iinters = 1; + inter = (DdNode **) malloc(sizeof(DdNode *) * iinters); + for (icur = 0; icur < iinters; icur++) inter[icur] = NULL; + // Read file data + interfileheader.inputfile = NULL; + filename = NULL; // For nested files + iequal = 0; // Flag for encountered = sign + icur = 0; // Pointer for inputline buffer location + iline = 1; // Current file line (first after header) + icomment = 0; // Flag for comments + maxlinesize = 80; // inputline starting buffer size + inputline = (char *) malloc(sizeof(char) * maxlinesize); + + do { + buf = fgetc(stdin); + if (buf == ';' || buf == '%' || buf == '$') icomment = 1; + if (buf == '\n') { + if (icomment) icomment = 0; + if (iequal > 1) { + fprintf(stderr, "Error at line: %i. Line contains more than 1 equal(=) signs.\n", iline); + free(inter); + free(inputline); + return NULL; + } else iequal = 0; + if (icur > 0) { + inputline[icur] = '\0'; + if (inputline[0] == '@') { + if (inputline[1] == 'e') { + free(inter); + free(inputline); + exit(0); + } else { + itmp = GetParam(inputline, 1); + if (itmp > varmap->varcnt) + EnlargeNamedVars(varmap, itmp); + itmp = GetParam(inputline, 2); + if (itmp > iinters) { + inter = (DdNode **) realloc(inter, sizeof(DdNode *) * itmp); + for (i = iinters; i < itmp; i++) inter[i] = NULL; + iinters = itmp; + } + } + icur = 0; + } else { + if (inputline[0] != 'L') { + fprintf(stderr, "Error at line: %i. Intermediate results should start with L.\n", iline); + free(inter); + free(inputline); + return NULL; + } + curinter = getInterBDD(inputline); + if (curinter == -1) { + if (inputline[0] == 'L' && IsPosNumber(inputline + 1)) { + curinter = atoi(inputline + 1) - 1; + if (curinter > -1 && curinter < iinters && inter[curinter] != NULL) { + if (_debug) fprintf(stderr, "Returned: %s\n", inputline); + Line = inter[curinter]; + free(inter); + free(inputline); + return Line; + } else { + fprintf(stderr, "Error at line: %i. Return result asked doesn't exist.\n", iline); + free(inter); + free(inputline); + return NULL; + } + } else { + fprintf(stderr, "Error at line: %i. 
Invalid intermediate result format.\n", iline); + free(inter); + free(inputline); + return NULL; + } + } else if (curinter > -1) { + if (curinter >= iinters) { + inter = (DdNode **) realloc(inter, sizeof(DdNode *) * (curinter + 1)); + for (i = iinters; i < curinter + 1; i++) inter[i] = NULL; + iinters = curinter + 1; + } + if (inter[curinter] == NULL) { + if (_debug) fprintf(stderr, "%i %s\n", curinter, inputline); + filename = getFileName(inputline); + if (filename == NULL) { + Line = OnlineLineParser(manager, varmap, inter, iinters, inputline, iline); + } else { + interfileheader = ReadFileHeader(filename); + if (interfileheader.inputfile == NULL) { + //Line = simpleBDDload(manager, varmap, filename); + Line = NULL; + } else { + Line = FileGenerateBDD(manager, *varmap, interfileheader); + } + if (Line == NULL) fprintf(stderr, "Error at line: %i. Error in nested BDD file: %s.\n", iline, filename); + free(filename); + filename = NULL; + interfileheader.inputfile = NULL; + } + if (Line == NULL) { + free(inter); + free(inputline); + return NULL; + } + inter[curinter] = Line; + icur = 0; + } else if (inter[curinter] != NULL) { + fprintf(stderr, "Error at line: %i. Intermediate results can't be overwritten.\n", iline); + free(inter); + free(inputline); + return NULL; + } + } else { + fprintf(stderr, "Error at line: %i. Intermediate result asked doesn't exist.\n", iline); + free(inter); + free(inputline); + return NULL; + } + } + } + iline++; + } else if (buf != ' ' && buf != '\t' && !icomment) { + if (buf == '=') iequal++; + inputline[icur] = buf; + icur += 1; + if (icur == _maxbufsize) { + fprintf(stderr, "Error: Maximum buffer size(%i) exceeded.\n", _maxbufsize); + free(inter); + free(inputline); + return NULL; + } + while (icur > maxlinesize - 1) { + maxlinesize *= 2; + inputline = (char *) realloc(inputline, sizeof(char) * maxlinesize); + } + } + } while(1); + fprintf(stderr, "Error, file either doesn't end with a blank line or no return result was asked.\n"); + free(inter); + free(inputline); + return NULL; +} + +DdNode* OnlineLineParser(DdManager *manager, namedvars *varmap, DdNode **inter, int maxinter, char *function, int iline) { + int istart, iend, ilength, i, symbol, ivar, inegvar, inegoper, iconst; + long startAt, endAt; + double secs; + DdNode *bdd; + char *term, curoper; + bdd = HIGH(manager); + Cudd_Ref(bdd); + term = NULL; + ivar = -1; + curoper = '*'; + istart = -1; + iend = strlen(function) - 1; + ilength = -1; + symbol = -1; + inegvar = 0; + inegoper = 0; + iconst = 0; + for (i = strlen(function) - 1; i > -1; i--) { + if (symbol == -1 && isOperator(function[i])) { + symbol = i; + istart = i + 1; + ilength = iend - i; + iend = i - 1; + if (ilength > 0 && !(ilength == 1 && function[istart] == '~')) { + term = (char *) malloc(sizeof(char) * (ilength + 1)); + strncpy(term, function + istart, ilength); + term[ilength] = '\0'; + } else { + fprintf(stderr, "Line Parser Error at line: %i. 
An operator was encounter with no term at its right side.\n", iline); + free(term); + return NULL; + } + } + if (symbol != -1) { + if (term[0] == '~') inegvar = 1; else inegvar = 0; + if (term[0 + inegvar] != 'L') { + // Term is a variable + if (strcmp(term + inegvar, "TRUE") == 0) { + iconst = 1; + } else if (strcmp(term + inegvar, "FALSE") == 0) { + iconst = 1; + inegvar = 1; + } else { + iconst = 0; + ivar = AddNamedVar(*varmap, term + inegvar); + if (ivar == -1) { + EnlargeNamedVars(varmap, varmap->varcnt + 1); + ivar = AddNamedVar(*varmap, term + inegvar); + } + if (ivar == -1) { + fprintf(stderr, "Line Parser Error at line: %i. More BDD variables than the reserved term: %s.\n", iline, term); + free(term); + return NULL; + } + } + if (_debug) fprintf(stderr, "%s\n", term); + if (_debug && !iconst) fprintf(stderr, "PNZ1:%.0f P1:%.0f S1:%i PNZ2:%.0f P2:%.0f S2:%i\n", + Cudd_CountPathsToNonZero(bdd), + Cudd_CountPath(bdd), + Cudd_DagSize(bdd), + Cudd_CountPathsToNonZero(GetVar(manager, ivar + varmap->varstart)), + Cudd_CountPath(GetVar(manager, ivar + varmap->varstart)), + Cudd_DagSize(GetVar(manager, ivar + varmap->varstart)) ); + startAt = clock(); + if (!iconst) { + if (inegvar) bdd = BDD_Operator(manager, NOT(GetVar(manager, ivar + varmap->varstart)), bdd, curoper, inegoper); + else bdd = BDD_Operator(manager, GetVar(manager, ivar + varmap->varstart), bdd, curoper, inegoper); + } else { + switch(curoper) { + case '+': + if (inegvar ^ inegoper) ; else { + bdd = HIGH(manager); + Cudd_Ref(bdd); + } + break; + case '*': + if (inegvar ^ inegoper) { + bdd = LOW(manager); + Cudd_Ref(bdd); + } + break; + case '#': + if (inegvar ^ inegoper) ; else bdd = NOT(bdd); + break; + } + } + endAt = clock(); + secs = ((double) (endAt - startAt)) / ((double) CLOCKS_PER_SEC); + if (_debug) fprintf(stderr, "term: %s of line: %i took: %i\n", term, iline, endAt - startAt); + //if ((endAt - startAt) > 10000000) Cudd_AutodynDisable(manager); + if (bdd == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. Error using operator %c on term: %s.\n", iline, curoper, term); + free(term); + return NULL; + } + } else { + // Term is an intermediate result + if (IsPosNumber(term + inegvar + 1)) ivar = atoi(term + inegvar + 1) - 1; else { + fprintf(stderr, "Line Parser Error at line: %i. Invalid intermediate result format term: %s.\n", iline, term); + free(term); + return NULL; + } + if (ivar < 0 || ivar > maxinter || inter[ivar] == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. Usage of undeclared intermediate result term: %s.\n", iline, term); + free(term); + return NULL; + } + if (_debug) fprintf(stderr, "%s\n", term); + if (_debug) fprintf(stderr, "PNZ1:%.0f P1:%.0f S1:%i PNZ2:%.0f P2:%.0f S2:%i\n", + Cudd_CountPathsToNonZero(bdd), + Cudd_CountPath(bdd), + Cudd_DagSize(bdd), + Cudd_CountPathsToNonZero(inter[ivar]), + Cudd_CountPath(inter[ivar]), + Cudd_DagSize(inter[ivar]) ); + startAt = clock(); + if (inegvar) bdd = BDD_Operator(manager, NOT(inter[ivar]), bdd, curoper, inegoper); + else bdd = BDD_Operator(manager, inter[ivar], bdd, curoper, inegoper); + endAt = clock(); + secs = ((double) (endAt - startAt)) / ((double) CLOCKS_PER_SEC); + if (_debug) fprintf(stderr, "term: %s of line: %i took: %i\n", term, iline, endAt - startAt); + //if ((endAt - startAt) > 10000000) Cudd_AutodynDisable(manager); + if (bdd == NULL) { + fprintf(stderr, "Line Parser Error at line: %i. 
Error using operator %c on term: %s.\n", iline, curoper, term); + free(term); + return NULL; + } + } + free(term); + term = NULL; + curoper = function[symbol]; + if (curoper == '=') return bdd; + if (function[symbol - 1] == '~') { + inegoper = 1; + i--; + iend--; + } else { + inegoper = 0; + } + symbol = -1; + } + } + return NULL; +} + +int GetParam(char *inputline, int iParam) { + int icoma, istart, iend, ret; + char *numb; + istart = 1; + icoma = istart; + iend = strlen(inputline); + while((inputline[icoma] != ',') && (icoma < iend)) + icoma++; + if (iParam == 1) { + numb = (char *) malloc(sizeof(char) * icoma); + strncpy(numb, inputline + 1, icoma - 1); + numb[icoma - 1] = '\0'; + if (IsPosNumber(numb)) { + ret = atoi(numb); + free(numb); + return ret; + } + } else if(iParam == 2) { + numb = (char *) malloc(sizeof(char) * (iend - icoma + 1)); + strncpy(numb, inputline + icoma + 1, iend - icoma); + numb[iend - icoma] = '\0'; + if (IsPosNumber(numb)) { + ret = atoi(numb); + free(numb); + return ret; + } + } + return 0; +} + +void onlinetraverse(DdManager *manager, namedvars varmap, hisqueue *HisQueue, DdNode *bdd) { + char buf, *inputline; + int icur, maxlinesize, iline, index, iloop, ivalue, iQsize, i, inQ; + double dvalue; + DdNode **Q, **Q2, *h_node, *l_node, *curnode; + hisqueue *his; + hisnode *hnode; + iloop = 1; + icur = 0; // Pointer for inputline buffer location + iline = 1; // Current file line (first after header) + maxlinesize = 80; // inputline starting buffer size + inputline = (char *) malloc(sizeof(char) * maxlinesize); + curnode = bdd; + iQsize = 0; + Q = (DdNode **) malloc(sizeof(DdNode *) * iQsize); + Q2 = NULL; + his = InitHistory(varmap.varcnt); + do { + buf = fgetc(stdin); + if (buf == '\n') { + inputline[icur] = '\0'; + if ((icur > 0) && (inputline[0] == '@') && (inputline[2] == ',' || inputline[2] == '\0')) { + switch(inputline[1]) { + case 'c': + printf("bdd_temp_value('%s', %i).\n", GetNodeVarNameDisp(manager, varmap, curnode), iQsize); + break; + case 'n': + if (curnode != HIGH(manager) && curnode != LOW(manager) && (hnode = GetNode(his, varmap.varstart, curnode)) == NULL) { + AddNode(his, varmap.varstart, curnode, 0.0, 0, NULL); + l_node = LowNodeOf(manager, curnode); + h_node = HighNodeOf(manager, curnode); + inQ = 0; + for(i = 0; (i < iQsize / 2) && (inQ < 3); i++) + inQ = (Q[i] == l_node) || (Q[iQsize - i] == l_node) + 2 * (Q[i] == h_node) || (Q[iQsize - i] == h_node); + if (inQ & 1 == 0) inQ = inQ + (GetNode(his, varmap.varstart, l_node) != NULL); + if (inQ & 2 == 0) inQ = inQ + 2 * (GetNode(his, varmap.varstart, h_node) != NULL); + if (inQ & 1 == 1) inQ = inQ - (l_node == HIGH(manager) || l_node == LOW(manager)); + if (inQ & 2 == 2) inQ = inQ - 2 * (h_node == HIGH(manager) || h_node == LOW(manager)); + switch(inQ) { + case 0: + iQsize += 2; + Q = (DdNode **) realloc(Q, sizeof(DdNode *) * iQsize); + Q[iQsize - 2] = l_node; + Q[iQsize - 1] = h_node; + break; + case 1: + iQsize++; + Q = (DdNode **) realloc(Q, sizeof(DdNode *) * iQsize); + Q[iQsize - 1] = h_node; + break; + case 2: + iQsize++; + Q = (DdNode **) realloc(Q, sizeof(DdNode *) * iQsize); + Q[iQsize - 1] = l_node; + break; + case 3: + break; + default: + break; + } + } + if (inputline[2] == '\0' || strcmp(inputline + 3, "DFS") == 0) { + if (iQsize > 0) { + iQsize--; + curnode = Q[iQsize]; + Q = (DdNode **) realloc(Q, sizeof(DdNode *) * iQsize); + } + } else if (strcmp(inputline + 3, "BFS") == 0) { + if (iQsize > 0) { + iQsize--; + curnode = Q[0]; + Q2 = (DdNode **) malloc(sizeof(DdNode *) * iQsize); 
+ for(i = 0; i < iQsize; i++) + Q2[i] = Q[i + 1]; + free(Q); + Q = Q2; + } + } else { + fprintf(stderr, "Error: Could not find method: %s, Correct syntax @n,[DFS, BFS].\n", inputline + 3); + free(Q); + free(inputline); + exit(-1); + } + break; + case 'h': + printf("bdd_temp_value('%s').\n", GetNodeVarNameDisp(manager, varmap, HighNodeOf(manager, curnode))); + break; + case 'l': + printf("bdd_temp_value('%s').\n", GetNodeVarNameDisp(manager, varmap, LowNodeOf(manager, curnode))); + break; + case 'v': + index = GetNamedVarIndex(varmap, inputline + 3); + if (index >= 0) { + fprintf(stdout, "bdd_temp_value([%f,%i,%s]).\n", varmap.dvalue[index], varmap.ivalue[index], (char *) varmap.dynvalue[index]); + } else { + fprintf(stderr, "Error: Could not find variable: %s, Correct syntax @v,[variable name].\n", inputline + 3); + free(Q); + free(inputline); + exit(-1); + } + break; + case 'e': + iloop = 0; + break; + default: + fprintf(stderr, "Error: Not recognizable instruction: %s.\n", inputline); + free(Q); + free(inputline); + exit(-1); + break; + } + icur = 0; + } else { + fprintf(stderr, "Error: Not recognizable instruction: %s.\n", inputline); + free(Q); + free(inputline); + exit(-1); + } + iline++; + } else if (buf != ' ' && buf != '\t') { + inputline[icur] = buf; + icur += 1; + if (icur == _maxbufsize) { + fprintf(stderr, "Error: Maximum buffer size(%i) exceeded.\n", _maxbufsize); + free(Q); + free(inputline); + exit(-1); + } + while (icur > maxlinesize - 1) { + maxlinesize *= 2; + inputline = (char *) realloc(inputline, sizeof(char) * maxlinesize); + } + } + } while(iloop); + free(Q); + free(inputline); +} diff --git a/ProbLog/simplecudd/simplecudd.h b/ProbLog/simplecudd/simplecudd.h new file mode 100644 index 000000000..49ed3a5aa --- /dev/null +++ b/ProbLog/simplecudd/simplecudd.h @@ -0,0 +1,287 @@ +/******************************************************************************\ +* * +* SimpleCUDD library (www.cs.kuleuven.be/~theo/tools/simplecudd.html) * +* SimpleCUDD was developed at Katholieke Universiteit Leuven(www.kuleuven.be) * +* * +* Copyright T. Mantadelis and Katholieke Universiteit Leuven 2008 * +* * +* Author: Theofrastos Mantadelis * +* File: simplecudd.h * +* * +******************************************************************************** +* * +* The "Artistic License" * +* * +* Preamble * +* * +* The intent of this document is to state the conditions under which a * +* Package may be copied, such that the Copyright Holder maintains some * +* semblance of artistic control over the development of the package, * +* while giving the users of the package the right to use and distribute * +* the Package in a more-or-less customary fashion, plus the right to make * +* reasonable modifications. * +* * +* Definitions: * +* * +* "Package" refers to the collection of files distributed by the * +* Copyright Holder, and derivatives of that collection of files * +* created through textual modification. * +* * +* "Standard Version" refers to such a Package if it has not been * +* modified, or has been modified in accordance with the wishes * +* of the Copyright Holder as specified below. * +* * +* "Copyright Holder" is whoever is named in the copyright or * +* copyrights for the package. * +* * +* "You" is you, if you're thinking about copying or distributing * +* this Package. * +* * +* "Reasonable copying fee" is whatever you can justify on the * +* basis of media cost, duplication charges, time of people involved, * +* and so on. 
(You will not be required to justify it to the * +* Copyright Holder, but only to the computing community at large * +* as a market that must bear the fee.) * +* * +* "Freely Available" means that no fee is charged for the item * +* itself, though there may be fees involved in handling the item. * +* It also means that recipients of the item may redistribute it * +* under the same conditions they received it. * +* * +* 1. You may make and give away verbatim copies of the source form of the * +* Standard Version of this Package without restriction, provided that you * +* duplicate all of the original copyright notices and associated disclaimers. * +* * +* 2. You may apply bug fixes, portability fixes and other modifications * +* derived from the Public Domain or from the Copyright Holder. A Package * +* modified in such a way shall still be considered the Standard Version. * +* * +* 3. You may otherwise modify your copy of this Package in any way, provided * +* that you insert a prominent notice in each changed file stating how and * +* when you changed that file, and provided that you do at least ONE of the * +* following: * +* * +* a) place your modifications in the Public Domain or otherwise make them * +* Freely Available, such as by posting said modifications to Usenet or * +* an equivalent medium, or placing the modifications on a major archive * +* site such as uunet.uu.net, or by allowing the Copyright Holder to include * +* your modifications in the Standard Version of the Package. * +* * +* b) use the modified Package only within your corporation or organization. * +* * +* c) rename any non-standard executables so the names do not conflict * +* with standard executables, which must also be provided, and provide * +* a separate manual page for each non-standard executable that clearly * +* documents how it differs from the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 4. You may distribute the programs of this Package in object code or * +* executable form, provided that you do at least ONE of the following: * +* * +* a) distribute a Standard Version of the executables and library files, * +* together with instructions (in the manual page or equivalent) on where * +* to get the Standard Version. * +* * +* b) accompany the distribution with the machine-readable source of * +* the Package with your modifications. * +* * +* c) give non-standard executables non-standard names, and clearly * +* document the differences in manual pages (or equivalent), together * +* with instructions on where to get the Standard Version. * +* * +* d) make other distribution arrangements with the Copyright Holder. * +* * +* 5. You may charge a reasonable copying fee for any distribution of this * +* Package. You may charge any fee you choose for support of this * +* Package. You may not charge a fee for this Package itself. However, * +* you may distribute this Package in aggregate with other (possibly * +* commercial) programs as part of a larger (possibly commercial) software * +* distribution provided that you do not advertise this Package as a * +* product of your own. You may embed this Package's interpreter within * +* an executable of yours (by linking); this shall be construed as a mere * +* form of aggregation, provided that the complete Standard Version of the * +* interpreter is so embedded. * +* * +* 6. 
The scripts and library files supplied as input to or produced as * +* output from the programs of this Package do not automatically fall * +* under the copyright of this Package, but belong to whoever generated * +* them, and may be sold commercially, and may be aggregated with this * +* Package. If such scripts or library files are aggregated with this * +* Package via the so-called "undump" or "unexec" methods of producing a * +* binary executable image, then distribution of such an image shall * +* neither be construed as a distribution of this Package nor shall it * +* fall under the restrictions of Paragraphs 3 and 4, provided that you do * +* not represent such an executable image as a Standard Version of this * +* Package. * +* * +* 7. C subroutines (or comparably compiled subroutines in other * +* languages) supplied by you and linked into this Package in order to * +* emulate subroutines and variables of the language defined by this * +* Package shall not be considered part of this Package, but are the * +* equivalent of input as in Paragraph 6, provided these subroutines do * +* not change the language in any way that would cause it to fail the * +* regression tests for the language. * +* * +* 8. Aggregation of this Package with a commercial distribution is always * +* permitted provided that the use of this Package is embedded; that is, * +* when no overt attempt is made to make this Package's interfaces visible * +* to the end user of the commercial distribution. Such use shall not be * +* construed as a distribution of this Package. * +* * +* 9. The name of the Copyright Holder may not be used to endorse or promote * +* products derived from this software without specific prior written * +* permission. * +* * +* 10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * +* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * +* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
* +* * +* The End * +* * +\******************************************************************************/ + + +#include +#include +#include +#include +#include +#include "util.h" +#include "cudd.h" +#include "cuddInt.h" +#include "general.h" + +#define IsHigh(manager, node) HIGH(manager) == node +#define IsLow(manager, node) LOW(manager) == node +#define HIGH(manager) Cudd_ReadOne(manager) +#define LOW(manager) Cudd_Not(Cudd_ReadOne(manager)) +#define NOT(node) Cudd_Not(node) +#define GetIndex(node) Cudd_NodeReadIndex(node) +#define GetVar(manager, index) Cudd_bddIthVar(manager, index) +#define NewVar(manager) Cudd_bddNewVar(manager) +#define KillBDD(manager) Cudd_Quit(manager) +#define GetVarCount(manager) Cudd_ReadSize(manager) +#define DEBUGON _debug = 1 +#define DEBUGOFF _debug = 0 +#define RAPIDLOADON _RapidLoad = 1 +#define RAPIDLOADOFF _RapidLoad = 0 +#define SETMAXBUFSIZE(size) _maxbufsize = size +#define BDDFILE_ERROR -1 +#define BDDFILE_OTHER 0 +#define BDDFILE_SCRIPT 1 +#define BDDFILE_NODEDUMP 2 + +extern int _RapidLoad; +extern int _debug; +extern int _maxbufsize; + +typedef struct _bddfileheader { + FILE *inputfile; + int version; + int varcnt; + int varstart; + int intercnt; + int filetype; +} bddfileheader; + +typedef struct _namedvars { + int varcnt; + int varstart; + char **vars; + int *loaded; + double *dvalue; + int *ivalue; + void **dynvalue; +} namedvars; + +typedef struct _hisnode { + DdNode *key; + double dvalue; + int ivalue; + void *dynvalue; +} hisnode; + +typedef struct _hisqueue { + int cnt; + hisnode *thenode; +} hisqueue; + +typedef struct _nodeline { + char *varname; + char *truevar; + char *falsevar; + int nodenum; + int truenode; + int falsenode; +} nodeline; + +/* Initialization */ + +DdManager* simpleBDDinit(int varcnt); + +/* BDD Generation */ + +DdNode* D_BDDAnd(DdManager *manager, DdNode *bdd1, DdNode *bdd2); +DdNode* D_BDDNand(DdManager *manager, DdNode *bdd1, DdNode *bdd2); +DdNode* D_BDDOr(DdManager *manager, DdNode *bdd1, DdNode *bdd2); +DdNode* D_BDDNor(DdManager *manager, DdNode *bdd1, DdNode *bdd2); +DdNode* D_BDDXor(DdManager *manager, DdNode *bdd1, DdNode *bdd2); +DdNode* D_BDDXnor(DdManager *manager, DdNode *bdd1, DdNode *bdd2); + +DdNode* FileGenerateBDD(DdManager *manager, namedvars varmap, bddfileheader fileheader); +DdNode* OnlineGenerateBDD(DdManager *manager, namedvars *varmap); +DdNode* LineParser(DdManager *manager, namedvars varmap, DdNode **inter, int maxinter, char *function, int iline); +DdNode* OnlineLineParser(DdManager *manager, namedvars *varmap, DdNode **inter, int maxinter, char *function, int iline); +DdNode* BDD_Operator(DdManager *manager, DdNode *bdd1, DdNode *bdd2, char Operator, int inegoper); +int getInterBDD(char *function); +char* getFileName(const char *function); +int GetParam(char *inputline, int iParam); +int LoadVariableData(namedvars varmap, char *filename); + +/* Named variables */ + +namedvars InitNamedVars(int varcnt, int varstart); +void EnlargeNamedVars(namedvars *varmap, int newvarcnt); +int AddNamedVarAt(namedvars varmap, const char *varname, int index); +int AddNamedVar(namedvars varmap, const char *varname); +void SetNamedVarValuesAt(namedvars varmap, int index, double dvalue, int ivalue, void *dynvalue); +int SetNamedVarValues(namedvars varmap, const char *varname, double dvalue, int ivalue, void *dynvalue); +int GetNamedVarIndex(const namedvars varmap, const char *varname); +int RepairVarcnt(namedvars *varmap); +char* GetNodeVarName(DdManager *manager, namedvars varmap, DdNode *node); +char* 
GetNodeVarNameDisp(DdManager *manager, namedvars varmap, DdNode *node); +int all_loaded(namedvars varmap, int disp); + +/* Traversal */ + +DdNode* HighNodeOf(DdManager *manager, DdNode *node); +DdNode* LowNodeOf(DdManager *manager, DdNode *node); + +/* Traversal - History */ + +hisqueue* InitHistory(int varcnt); +void ReInitHistory(hisqueue *HisQueue, int varcnt); +void AddNode(hisqueue *HisQueue, int varstart, DdNode *node, double dvalue, int ivalue, void *dynvalue); +hisnode* GetNode(hisqueue *HisQueue, int varstart, DdNode *node); +int GetNodeIndex(hisqueue *HisQueue, int varstart, DdNode *node); +void onlinetraverse(DdManager *manager, namedvars varmap, hisqueue *HisQueue, DdNode *bdd); + +/* Save-load */ + +bddfileheader ReadFileHeader(char *filename); +int CheckFileVersion(const char *version); + +DdNode * LoadNodeDump(DdManager *manager, namedvars varmap, FILE *inputfile); +DdNode * LoadNodeRec(DdManager *manager, namedvars varmap, hisqueue *Nodes, FILE *inputfile, nodeline current); +DdNode * GetIfExists(DdManager *manager, namedvars varmap, hisqueue *Nodes, char *varname, int nodenum); + +int SaveNodeDump(DdManager *manager, namedvars varmap, DdNode *bdd, char *filename); +void SaveExpand(DdManager *manager, namedvars varmap, hisqueue *Nodes, DdNode *Current, FILE *outputfile); +void ExpandNodes(hisqueue *Nodes, int index, int nodenum); + +/* Export */ + +int simpleBDDtoDot(DdManager *manager, DdNode *bdd, char *filename); +int simpleNamedBDDtoDot(DdManager *manager, namedvars varmap, DdNode *bdd, char *filename); +
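
Note on the node-dump format written by SaveNodeDump and read back by LoadNodeDump above. Judging from the fprintf calls, a dump begins with an "@NODES1" version line (the form CheckFileVersion recognizes), followed by the variable count and the variable start index, then one name/permutation line per variable, and finally one line per inner BDD node giving the node's variable name and number, its high (true) child and its low (false) child as name/number pairs, with "TRUE 0" and "FALSE 0" standing for the terminal nodes; the root node is written first and previously unseen children follow. A small hedged example (names and values invented here) encoding the function a AND b:

    @NODES1
    2
    0
    a	0
    b	1
    a	0	b	0	FALSE	0
    b	0	TRUE	0	FALSE	0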
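
Note on the script format consumed by FileGenerateBDD and LineParser. The parser skips spaces and tabs, treats ';', '%' and '$' as comment starters, requires every result line to start with 'L', allows at most one '=' per line, and combines terms from right to left with '+' (or), '*' (and) and '#' (xor), with '~' for negation and TRUE/FALSE as constants; a later line consisting only of an already-defined L<n> returns that BDD, and the file must end with a newline. CheckFileVersion expects an "@BDD<n>" version line, and the three header values that ReadFileHeader fills in (variable count, variable start and intermediate count) follow it, although their exact order is an assumption here, not something visible in this part of the patch. A hedged example script:

    @BDD1
    3
    0
    2
    L1 = x1 * x2
    L2 = L1 + ~x3
    L2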
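
Finally, a minimal usage sketch (illustrative only, not taken from the patch) of the public API declared in simplecudd.h. It assumes that the argument of simpleBDDinit is the initial variable count, that named variables are addressed as GetVar(manager, index + varmap.varstart) in the same way LineParser does above, and that the D_BDD* wrappers behave like the corresponding CUDD operators; simpleNamedBDDtoDot passes through Cudd_DumpDot's return value, which is 1 on success.

    /* sketch.c -- illustrative client of simplecudd.h, not part of the patch */
    #include <stdio.h>
    #include "simplecudd.h"

    int main(void)
    {
      DdManager *manager;
      namedvars varmap;
      DdNode *ab, *f;
      int a, b, c;

      manager = simpleBDDinit(3);      /* assumption: argument is the initial variable count */
      varmap  = InitNamedVars(3, 0);   /* three named variables, first BDD index 0 */
      a = AddNamedVar(varmap, "a");
      b = AddNamedVar(varmap, "b");
      c = AddNamedVar(varmap, "c");

      /* f = (a AND b) OR (NOT c) */
      ab = D_BDDAnd(manager, GetVar(manager, a + varmap.varstart),
                             GetVar(manager, b + varmap.varstart));
      f  = D_BDDOr(manager, ab, NOT(GetVar(manager, c + varmap.varstart)));

      /* Cudd_DumpDot (and hence this wrapper) returns 1 on success */
      if (simpleNamedBDDtoDot(manager, varmap, f, "f.dot") <= 0)
        fprintf(stderr, "could not write f.dot\n");

      KillBDD(manager);
      return 0;
    }

Compiling such a client needs the CUDD headers and library on the include and link paths, as for the rest of the simplecudd sources.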