Commit e4a70d7e authored by Leo Gordon
Browse files

Incorporate ENV hash into the tree of possible options in order to be able to...

Incorporate the ENV hash into the tree of possible options so that a value can be "required", plus a bit of config-inheritance cleanup.
parent f48fbeb0
......@@ -185,7 +185,7 @@ sub process_options {
$self->$method();
}
my $possibly_used_options = {};
my $possibly_used_options = { 'ENV' => \%ENV };
$self->root( $possibly_used_options );
# the first run of this method allows us to collect possibly_used_options
......
......@@ -36,18 +36,10 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'apply_to_databases', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara3',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'source_server1' => {
-host => 'ens-staging',
-port => 3306,
......
......@@ -51,18 +51,10 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'failure_test', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara3',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'job_count' => 20, # controls the total number of FailureTest jobs
'failure_rate' => 3, # controls the rate of jobs that are programmed to fail
'state' => 'RUN', # controls in which state the jobs are programmed to fail
......
......@@ -51,18 +51,10 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'zip_unzip_files', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara3',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'unzip' => 0, # set to '1' to switch to decompression
'only_files' => '*', # use '*.sql*' to only (un)zip these files
'zipping_capacity' => 10, # how many files can be (un)zipped in parallel
......
......@@ -69,16 +69,18 @@ use base ('Bio::EnsEMBL::Hive::DependentOptions');
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
'ensembl_cvs_root_dir' => $self->o('ENV', 'ENSEMBL_CVS_ROOT_DIR'), # it will make sense to set this variable if you are going to use ehive frequently
'password' => $self->o('ENV', 'ENSADMIN_PSW'), # people will have to make an effort NOT to insert it into config files like .bashrc etc
'host' => 'localhost',
'pipeline_name' => 'hive_generic',
'pipeline_db' => {
-host => 'compara3',
-host => $self->o('host'),
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'),
-dbname => $ENV{'USER'}.'_'.$self->o('pipeline_name'), # example of a linked definition (resolved via saturation)
-dbname => $self->o('ENV', 'USER').'_'.$self->o('pipeline_name'), # example of a linked definition (resolved via saturation)
},
};
}
......
......@@ -67,20 +67,13 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'long_mult', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara2',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'first_mult' => '9650156169', # the actual numbers that will be multiplied must also be possible to specify from the command line
'first_mult' => '9650156169', # the actual numbers to be multiplied can also be specified from the command line
'second_mult' => '327358788',
};
}
......
......@@ -44,19 +44,11 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'sema_long_mult', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara3',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'first_mult' => '9650516169', # the actual numbers that will be multiplied must also be possible to specify from the command line
'first_mult' => '9650516169', # the actual numbers to be multiplied can also be specified from the command line
'second_mult' => '327358788',
};
}
......
......@@ -53,18 +53,10 @@ use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf'); # All Hive datab
sub default_options {
my ($self) = @_;
return {
'ensembl_cvs_root_dir' => $ENV{'ENSEMBL_CVS_ROOT_DIR'}, # it will make sense to set this variable if you are going to use ehive frequently
%{ $self->SUPER::default_options() }, # inherit other stuff from the base class
'pipeline_name' => 'zip_tables', # name used by the beekeeper to prefix job names on the farm
'pipeline_db' => { # connection parameters
-host => 'compara2',
-port => 3306,
-user => 'ensadmin',
-pass => $self->o('password'), # a rule where a previously undefined parameter is used (which makes either of them obligatory)
-dbname => $ENV{USER}.'_'.$self->o('pipeline_name'), # a rule where a previously defined parameter is used (which makes both of them optional)
},
'source_db' => {
-host => 'compara2',
-port => 3306,
......@@ -76,7 +68,7 @@ sub default_options {
'with_schema' => 1, # include table creation statement before inserting the data
'only_tables' => '%', # use 'protein_tree%' or 'analysis%' to only dump those tables
'invert_selection' => 0, # use 'NOT LIKE' instead of 'LIKE'
'target_dir' => $ENV{'HOME'}.'/'.$self->o('source_dbname'), # where we want the compressed files to appear
'target_dir' => $self->o('ENV', 'HOME').'/'.$self->o('source_dbname'), # where we want the compressed files to appear
'dumping_capacity' => 10, # how many tables can be dumped in parallel
};
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment