should make the Hydra scheduler a lot less CPU-intensive, since it won't run hydra_eval_jobs all the time.
# NOTE(review): the following opaque tokens were present verbatim in the
# original dump immediately before the package (they look like VCS artifact
# IDs, not Perl).  Preserved here as comments, since as bare text they would
# not parse.
# 5SHCWE7XPQORSOLY7HGAIK2ODKBFPY4KVXRL2W7X6D4WALU544HQC
# 3XTHEUMP2ZOMPQWE3S5QWHIHCEJNEXGDPQB3JUVZFPS3RFMY455QC
# OOQ2D3KCLFPYNAN253PHWLBQMB6OMO2KYQWQXLTP65SQAYZWQ5LAC
# D5QIOJGPKQJIYBUCSC3MFJ3TXLPNZ2XMI37GXMFRVRFWWR2VMTFAC
# KN3VYE5P2RJB3KZ355LA5C2T2D5S2IR3QZFE53AJIWUVMETEEYDAC
# M552HLIAP52D42AVXVC5SGROAYN2TBCEUZOXESWEMBBUX7G3U6TAC
# X27GNHDV5KPZ5GSH6DCAJMNCEMZLCP7M43JWF2X3O5QWXMOX273AC
# 3HCBU2FAXZMSF4JJR5Q64BSN66MBOGVETNHK33V2WSNDGOF4HHQAC
# BVOPAMLSAU4UTV3DUX53OYDMXP2SETAQVUKAYE2OTCVVN4RD7LLQC
# RWIBJ5L4MDV4XQIQXBB45RBVAK2IAK3IMP37YUX2SHW7YDKDAAXAC
# N22GPKYTOLZLBGTGDATQDVZ4R5APZEAOIA7L32X4UXBH4XNI7MWAC
# FDE3BJAPDEP3BYT5A5GEGLNXPPZLA2KTGXB4ZNYRP4LJ7IFRKYXAC
# POPU2ATH2HHBTGHKRAV3EY2K55P664IARI3YJGLDKVJ6PQPXBQ4AC
# S6OISBQ3HPFHAAQ5ENG7N3MNGOPNEJPIFKSSA5N4G6KJQTQBSSLQC
# S5PV6IIMKJ7PGWIFLLXERHYF3BCP2UEGFRZEZLD6UUBLVEZXJLUAC
# RBNQKATLSAKTGW2IRNB5CRV3SEH5F6E4BPVWX4BII7MH5TCIPINQC

# DBIx::Class result class for the JobsetInputHashes table.  Each row
# records the SHA-256 of the hydra_eval_jobs arguments for one (project,
# jobset) evaluation, so the scheduler can skip re-evaluating an
# unchanged set of inputs.

package Hydra::Schema::JobsetInputHashes;

# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE

use strict;
use warnings;

use base 'DBIx::Class';

__PACKAGE__->load_components("Core");

__PACKAGE__->table("JobsetInputHashes");

# Columns mirror the SQL schema: (project, jobset) identify the jobset,
# "hash" is the input hash, "timestamp" records when it was evaluated.
__PACKAGE__->add_columns(
    "project",
    {
        data_type      => "text",
        default_value  => undef,
        is_foreign_key => 1,
        is_nullable    => 0,
        size           => undef,
    },
    "jobset",
    {
        data_type      => "text",
        default_value  => undef,
        is_foreign_key => 1,
        is_nullable    => 0,
        size           => undef,
    },
    "hash",
    {
        data_type     => "text",
        default_value => undef,
        is_nullable   => 0,
        size          => undef,
    },
    "timestamp",
    {
        data_type     => "integer",
        default_value => undef,
        is_nullable   => 0,
        size          => undef,
    },
);

__PACKAGE__->set_primary_key("project", "jobset", "hash");

# Relationships to the owning project and jobset rows.  The jobset FK is
# composite: (project, jobset) -> Jobsets(project, name).
__PACKAGE__->belongs_to("project", "Hydra::Schema::Projects", { name => "project" });

__PACKAGE__->belongs_to(
    "jobset",
    "Hydra::Schema::Jobsets",
    { name => "jobset", project => "project" },
);

# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:f8/4vTSQJbmAh/0PZHeFDg

# You can replace this text with custom content, and it will be preserved
# on regeneration

1;
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-10-23 16:56:03# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:M+eetraKtSfF8q3cqJhEPw
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:xWsqXneZw90uEw/vcEXc4w
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-10-23 16:56:03# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:zhL+ArisX2ZFU0NPIuDLdw
# Created by DBIx::Class::Schema::Loader v0.04999_09 @ 2009-11-17 14:04:55# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:dWe2DEsuZuOjVj4IA8TwQg
# Hash the arguments to hydra_eval_jobs and check the
# JobsetInputHashes table to see if we've already evaluated this exact
# set of inputs for this jobset.  If so, bail out early: just bump the
# jobset's lastcheckedtime and return without re-evaluating.
#
# NOTE(review): this is an excerpt from the middle of a larger sub —
# $jobset, $inputInfo and $db come from the enclosing scope, and later
# code presumably reuses $argsHash (e.g. to record the hash after a
# fresh evaluation), so the variable names are kept as-is.
my @args = ($jobset->nixexprinput, $jobset->nixexprpath, inputsToArgs($inputInfo));
# "@args" interpolates the list joined by $" (a space), so the hash
# covers the full command line handed to hydra_eval_jobs.
my $argsHash = sha256_hex("@args");
if ($jobset->jobsetinputhashes->find({hash => $argsHash})) {
    print " already evaluated, skipping\n";
    # Still record that we looked at this jobset, inside a transaction.
    txn_do($db, sub {
        $jobset->update({lastcheckedtime => time});
    });
    return;
}
);-- This table is used to prevent repeated Nix expression evaluation-- for the same set of inputs for a jobset. In the scheduler, after-- obtaining the current inputs for a jobset, we hash the inputs-- together, and if the resulting hash already appears in this table,-- we can skip the jobset. Otherwise it's added to the table, and the-- Nix expression for the jobset is evaluated. The hash is computed-- over the command-line arguments to hydra_eval_jobs.create table JobsetInputHashes (project text not null,jobset text not null,hash text not null,timestamp integer not null,primary key (project, jobset, hash),foreign key (project) references Projects(name) on delete cascade on update cascade,foreign key (project, jobset) references Jobsets(project, name) on delete cascade on update cascade