Warning: this requires a schema upgrade via "hydra-init".
use utf8;
package Hydra::Schema::JobsetEvalInputs;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::JobsetEvalInputs
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 TABLE: C<JobsetEvalInputs>
=cut
__PACKAGE__->table("JobsetEvalInputs");
=head1 ACCESSORS
=head2 eval
data_type: 'integer'
is_foreign_key: 1
is_nullable: 0
=head2 name
data_type: 'text'
is_nullable: 0
=head2 altnr
data_type: 'integer'
is_nullable: 0
=head2 type
data_type: 'text'
is_nullable: 0
=head2 uri
data_type: 'text'
is_nullable: 1
=head2 revision
data_type: 'text'
is_nullable: 1
=head2 value
data_type: 'text'
is_nullable: 1
=head2 dependency
data_type: 'integer'
is_foreign_key: 1
is_nullable: 1
=head2 path
data_type: 'text'
is_nullable: 1
=head2 sha256hash
data_type: 'text'
is_nullable: 1
=cut
__PACKAGE__->add_columns(
"eval",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
"name",
{ data_type => "text", is_nullable => 0 },
"altnr",
{ data_type => "integer", is_nullable => 0 },
"type",
{ data_type => "text", is_nullable => 0 },
"uri",
{ data_type => "text", is_nullable => 1 },
"revision",
{ data_type => "text", is_nullable => 1 },
"value",
{ data_type => "text", is_nullable => 1 },
"dependency",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
"path",
{ data_type => "text", is_nullable => 1 },
"sha256hash",
{ data_type => "text", is_nullable => 1 },
);
=head1 PRIMARY KEY
=over 4
=item * L</eval>
=item * L</name>
=item * L</altnr>
=back
=cut
__PACKAGE__->set_primary_key("eval", "name", "altnr");
=head1 RELATIONS
=head2 dependency
Type: belongs_to
Related object: L<Hydra::Schema::Builds>
=cut
__PACKAGE__->belongs_to(
"dependency",
"Hydra::Schema::Builds",
{ id => "dependency" },
{ join_type => "LEFT" },
);
=head2 eval
Type: belongs_to
Related object: L<Hydra::Schema::JobsetEvals>
=cut
__PACKAGE__->belongs_to("eval", "Hydra::Schema::JobsetEvals", { id => "eval" }, {});
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2012-04-15 16:38:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PNxVBdoUNeUzf5BztiIhLw
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;
foreach my $name (keys %{$inputInfo}) {
    for (my $n = 0; $n < scalar(@{$inputInfo->{$name}}); $n++) {
        my $input = $inputInfo->{$name}->[$n];
        $ev->jobsetevalinputs->create(
            { name => $name
            , altnr => $n
            , type => $input->{type}
            , uri => $input->{uri}
            , revision => $input->{revision}
            , value => $input->{value}
            , dependency => $input->{id}
            , path => $input->{storePath} || "" # !!! temporary hack
            , sha256hash => $input->{sha256hash}
            });
    }
}
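-- Illustrative sketch, not part of the evaluator: one iteration of the Perl
-- loop above corresponds roughly to a row like the following. The eval id
-- (123) and all input values are invented for the example.
insert into JobsetEvalInputs
    (eval, name, altNr, type, uri, revision, value, dependency, path, sha256hash)
values
    (123, 'nixpkgs', 0, 'svn', 'https://example.org/nixpkgs/trunk', '12345',
     null, null, '/nix/store/example-nixpkgs-checkout', null);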
create index IndexJobsetEvalMembersOnEval on JobsetEvalMembers(eval);
-- Inputs of jobset evals.
create table JobsetEvalInputs (
    eval integer not null references JobsetEvals(id) on delete cascade,
    name text not null,
    altNr integer not null,
    -- Copied from the jobsetinputs from which the build was created.
    type text not null,
    uri text,
    revision text,
    value text,
    dependency integer, -- build ID of the input, for type == 'build'
    path text,
    sha256hash text,
    primary key (eval, name, altNr),
    foreign key (dependency) references Builds(id)
);
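-- Illustrative only, not part of the schema: list the inputs of a hypothetical
-- eval 123, resolving 'build'-type inputs to the referenced build via the
-- dependency foreign key declared above.
select i.name, i.altNr, i.type, i.uri, i.revision, b.id as dependencyBuild
from JobsetEvalInputs i
left join Builds b on b.id = i.dependency
where i.eval = 123
order by i.name, i.altNr;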
-- Reconstruct the repository inputs for pre-existing evals. This is
-- tricky (and not entirely possible) because builds are not uniquely
-- part of a single eval, so they may have different inputs.
-- For Subversion or Bazaar inputs, pick the highest revision for each
-- input.
insert into JobsetEvalInputs (eval, name, altNr, type, uri, revision)
select e.id, b.name, 0, max(b.type), max(b.uri), max(b.revision)
from (select id from JobsetEvals where hasNewBuilds = 1) e
join JobsetEvalMembers m on e.id = m.eval
join BuildInputs b on b.build = m.build
where (b.type = 'svn' or b.type = 'svn-checkout' or b.type = 'bzr' or b.type = 'bzr-checkout')
group by e.id, b.name
having count(distinct type) = 1 and count(distinct uri) = 1;
-- For other inputs there is no "best" revision to pick, so only do
-- the conversion if there is only one.
insert into JobsetEvalInputs (eval, name, altNr, type, uri, revision)
select e.id, b.name, 0, max(b.type), max(b.uri), max(b.revision)
from (select id from JobsetEvals where hasNewBuilds = 1) e
join JobsetEvalMembers m on e.id = m.eval
join BuildInputs b on b.build = m.build
where (b.type != 'svn' and b.type != 'svn-checkout' and b.type != 'bzr' and b.type != 'bzr-checkout')
and b.uri is not null and b.revision is not null
and not exists(select 1 from JobsetEvalInputs i where e.id = i.eval and b.name = i.name)
group by e.id, b.name
having count(distinct type) = 1 and count(distinct uri) = 1 and count(distinct revision) = 1;
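-- Illustrative check, not part of the upgrade: the reconstruction above is
-- best-effort, so a query like this counts the evals with new builds whose
-- inputs could not be recovered.
select count(*)
from JobsetEvals e
where e.hasNewBuilds = 1
  and not exists (select 1 from JobsetEvalInputs i where i.eval = e.id);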