$sth=$self->prepare("INSERT INTO analysis_stats_monitor SELECT now(), analysis_stats.* from analysis_stats WHERE analysis_id = ".$stats->analysis_id);
$sth=$self->prepare("INSERT INTO analysis_stats_monitor SELECT CURRENT_TIMESTAMP, analysis_stats.* from analysis_stats WHERE analysis_id = ".$stats->analysis_id);
$sth->execute();
$sth->finish;
$stats->seconds_since_last_update(0);    # not exact but good enough :)
my$sql="SELECT column_name,column_key,extra FROM information_schema.columns WHERE table_schema='$dbname' and table_name='$table_name'";
my$sql={
'mysql'=>"SELECT column_name AS name, column_key='PRI' AS pk, extra='auto_increment' AS ai FROM information_schema.columns WHERE table_schema='$dbname' and table_name='$table_name'",
'sqlite'=>"PRAGMA table_info('$table_name')",
}->{$driver}ordie"could not find column info for driver='$driver'";
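For illustration only (not part of the patch), both statements can then be consumed through DBI in the same way, because the MySQL SELECT aliases its columns to name/pk/ai while SQLite's PRAGMA table_info already returns name and pk columns (alongside cid, type, notnull and dflt_value). A minimal sketch:

# hedged sketch: fetch the per-driver column info selected above
my $sth = $self->prepare($sql);
$sth->execute();
while (my $row = $sth->fetchrow_hashref()) {
    printf("column %s (primary key: %s)\n", $row->{'name'}, $row->{'pk'} ? 'yes' : 'no');
}
$sth->finish();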
Description : Interface method that should return a hash of pipeline_wide_parameter_name->pipeline_wide_parameter_value pairs.
...
...
@@ -112,6 +125,7 @@ sub pipeline_wide_parameters {
};
}
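As an illustration only (not taken from this changeset), a PipeConfig subclass typically overrides this interface method by merging the parent's hash with its own entries; 'take_time' is a hypothetical parameter name:

sub pipeline_wide_parameters {
    my ($self) = @_;
    return {
        %{ $self->SUPER::pipeline_wide_parameters },    # keep the parameters defined by the parent class
        'take_time' => 0,                               # hypothetical parameter visible to all analyses' jobs
    };
}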
=head2 resource_classes
Description : Interface method that should return a hash of resource_description_id->resource_description_hash.
...
...
@@ -127,6 +141,7 @@ sub resource_classes {
};
}
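For illustration only, a hedged sketch of an override; the '-desc' and 'LSF' keys and the requirement string are assumptions matching common eHive usage, not taken from this changeset:

sub resource_classes {
    my ($self) = @_;
    return {
        %{ $self->SUPER::resource_classes },                         # inherit the default resource class from the parent
        1 => { -desc => 'urgent_jobs', 'LSF' => '-q yesterday' },    # hypothetical id, description and LSF requirement
    };
}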
=head2 pipeline_analyses
Description : Interface method that should return a list of hashes that define analyses bundled with their corresponding jobs, dataflow and analysis_ctrl rules and resource_ids.
...
...
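As a hedged illustration (the module name and input values below are assumptions, not taken from this changeset; the analysis names 'start' and 'part_multiply' do appear in the hunks further down), such a list might start like this:

sub pipeline_analyses {
    my ($self) = @_;
    return [
        {   -logic_name => 'start',
            -module     => 'Bio::EnsEMBL::Hive::RunnableDB::LongMult::Start',   # hypothetical RunnableDB module
            -input_ids  => [
                { 'a_multiplier' => '9650156169', 'b_multiplier' => '327358788' },   # hypothetical seed job
            ],
        },
        # ... further analyses, wired together with -flow_into, -wait_for and resource ids
        #     as in the pipeline_analyses hunks shown below
    ];
}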
@@ -160,6 +175,7 @@ sub new {
return $self;
}
=head2 o
Description : This is the method you call in the interface methods when you need to substitute an option: $self->o('password') .
...
...
@@ -187,6 +203,7 @@ sub o { # descends the option hash structure (vivifying all enco
return $value;
}
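A hedged usage sketch (not from the patch): o() supports both a simple one-argument lookup and a two-argument form that descends into a connection-parameters hash ('standby_db' is a hypothetical option name):

my $password = $self->o('password');                 # substitute a top-level option
my $db_name  = $self->o('standby_db', '-dbname');    # descend into a nested connection-parameters hash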
=head2 dbconn_2_mysql
Description : A convenience method used to stringify a connection-parameters hash into a parameter string that both mysql and beekeeper.pl can understand
...
...
@@ -203,6 +220,37 @@ sub dbconn_2_mysql { # will save you a lot of typing
. ($with_db ? ($self->o($db_conn, '-dbname').' ') : '');
}
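A hedged usage sketch, mirroring the mysql-specific pipeline_create_commands entries that the patch replaces further below (the table definition itself is a made-up example):

my $create_cmd = 'mysql '.$self->dbconn_2_mysql('pipeline_db', 1)
               ." -e 'CREATE TABLE my_table (my_id int NOT NULL, PRIMARY KEY (my_id))'";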
=head2 db_connect_command
Description : A convenience method used to stringify a command to connect to the db OR pipe an sql file into it.
=cut
sub db_connect_command {
    my ($self, $db_conn) = @_;
    return ($hive_default_driver eq 'sqlite')
        ? 'sqlite3 '.$self->o($db_conn, '-dbname')
        : 'mysql '.$self->dbconn_2_mysql($db_conn, 1);
}
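A hedged usage sketch (not from the patch): because the stringified command already matches the configured driver, piping an SQL file into the pipeline database works the same way for MySQL and SQLite ($sql_file is a hypothetical path):

my $load_cmd = $self->db_connect_command('pipeline_db').' < '.$sql_file;
system($load_cmd) and die "Could not load '$sql_file'";    # system() returns 0 (false) on success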
=head2 db_execute_command
Description : A convenience method used to stringify a command to connect to the db and execute a given SQL command in it.
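The method's body is not shown in this excerpt; purely as an assumption, a sketch consistent with db_connect_command above and with the way it is called in the pipeline_create_commands hunk below could look like this (not the actual implementation):

sub db_execute_command {    # hedged sketch only; the real body is elided from this excerpt
    my ($self, $db_conn, $sql_command) = @_;
    return ($hive_default_driver eq 'sqlite')
        ? 'sqlite3 '.$self->o($db_conn, '-dbname').' "'.$sql_command.'"'
        : 'mysql '.$self->dbconn_2_mysql($db_conn, 1)." -e '".$sql_command."'";
}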
use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf');    # All Hive databases configuration files should inherit from HiveGeneric, directly or indirectly
# EXPERIMENTAL: choose either 'mysql' or 'sqlite' and do not forget to provide the correct connection parameters:
Description : Implements the default_options() interface method of Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf, which is used to initialize default options.
...
...
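For illustration only, a hedged sketch of such an override; the SUPER call merges in the defaults defined by HiveGeneric_conf, and 'long_mult' matches the pipeline_name visible in the hunk below:

sub default_options {
    my ($self) = @_;
    return {
        %{ $self->SUPER::default_options },    # inherit defaults (connection parameters etc.) from HiveGeneric_conf
        'pipeline_name' => 'long_mult',        # as in the hunk below
    };
}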
@@ -72,6 +77,7 @@ sub default_options {
'pipeline_name' => 'long_mult',    # name used by the beekeeper to prefix job names on the farm
@{$self->SUPER::pipeline_create_commands},    # inheriting database and hive tables' creation
# additional tables needed for long multiplication pipeline's operation:
-'mysql '.$self->dbconn_2_mysql('pipeline_db',1)." -e 'CREATE TABLE intermediate_result (a_multiplier char(40) NOT NULL, digit tinyint NOT NULL, result char(41) NOT NULL, PRIMARY KEY (a_multiplier, digit))'",
-'mysql '.$self->dbconn_2_mysql('pipeline_db',1)." -e 'CREATE TABLE final_result (a_multiplier char(40) NOT NULL, b_multiplier char(40) NOT NULL, result char(80) NOT NULL, PRIMARY KEY (a_multiplier, b_multiplier))'",
+$self->db_execute_command('pipeline_db', 'CREATE TABLE intermediate_result (a_multiplier char(40) NOT NULL, digit tinyint NOT NULL, result char(41) NOT NULL, PRIMARY KEY (a_multiplier, digit))'),
+$self->db_execute_command('pipeline_db', 'CREATE TABLE final_result (a_multiplier char(40) NOT NULL, b_multiplier char(40) NOT NULL, result char(80) NOT NULL, PRIMARY KEY (a_multiplier, b_multiplier))'),
];
}
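A hedged illustration (not part of the patch) of how these stringified commands are typically consumed, for instance by a pipeline-initialisation script: each command is simply run through the shell.

foreach my $cmd (@{ $self->pipeline_create_commands }) {
    warn "Running: $cmd\n";
    system($cmd) and die "Command failed: $cmd\n";    # system() returns 0 (false) on success
}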
...
...
@@ -143,7 +149,7 @@ sub pipeline_analyses {
# (jobs for this analysis will be flown_into via branch-2 from 'start' jobs above)
],
-flow_into => {
-    'MAIN' => [ 'mysql:////intermediate_result' ],
+    'MAIN' => [ ':////intermediate_result' ],
},
},
...
...
@@ -155,7 +161,7 @@ sub pipeline_analyses {
],
-wait_for => [ 'part_multiply' ],    # we can only start adding when all partial products have been computed