Builds can now emit metrics that Hydra will store in its database and render as time series via flot charts. Typical applications are to keep track of performance indicators, coverage percentages, artifact sizes, and so on.
For example, a coverage build can emit the coverage percentage as follows:
echo "lineCoverage $pct %" > $out/nix-support/hydra-metrics
Graphs of all metrics for a job can be seen at
http://…/job/<project>/<jobset>/<job>#tabs-charts
Specific metrics are also visible at
http://…/job/<project>/<jobset>/<job>/metric/<metric>
The latter URL can also return the data in JSON format, e.g. by requesting it with "curl -H 'Accept: application/json'".
T5BIOVJEMBIASP7EKQVV2N3VD6I56UXH6LCD5I33BDQEVHJAMGKQC LKEX7GZ4KQKK567K55PWJHISLNW5EJMFKGBGVY2VI6VUGOK5TYQAC PCD3ZH6ZMXTB53MASR7SJGAR4HME2TETMTMXSUR5ZMXTB3I3FH3QC BB2KXLXZ7NTYXKP7DHSPDKLKZG4LCV3YP7NRT4Q5AY5Q2KUPA4EQC WHULPA6SJJVCXISPL3GAQIRIKQN2DTUSMWGP2ECITWYMLHOON3SQC YHP5DSOOKAXAYHUDMYO6EKX2YAN7DLDQPJX5K52TCP7EXQVP6JAAC OG3Z3QGCG2FNDGF4VQYWOXLZCF7LGTZCUMGKUCBNAPAQ5FRFHQ2AC MHVIT4JYWUYD4UCGB2AHLXWLX6B5SYE22BREERNGANT7RGGDUFOAC 4S5JF5JPKWTDYHFJMTXOFTDAMYHD5ON2UBTLMGVYPJCP6QYIM2EAC 24BMQDZAWDQ7VNIA7TIROXSOYLOJBNZ2E4264WHWNJAEN6ZB3UOAC PQFOMNTLFY4HINJFAQYIFTBTSDRST6W2WNCVKE5ITR2IDF4SWWXQC HJOEIMLRDVQ2KZI5HGL2HKGBM3AHP7YIKGKDAGFUNKRUXVRB24NAC KSBB33RE2PK5SFN7ZMOTZJQHZB4JYIIUUKWDSD3LSZ5GD465AJHQC DEMSSSB22UI2ZN26C56UYO4NZWMDCRYKLAQARGOTSORSDXX3ZNNQC 6GZZDDQBAIAZJAESJDIT5JKVY6F7IQTJHG6QT2HHBMK2EWE2MHCQC D5QIOJGPKQJIYBUCSC3MFJ3TXLPNZ2XMI37GXMFRVRFWWR2VMTFAC SB2V735VJ2CDHGCXRUA5FOYHDRXQFVOZ3KXC3YKXWRNW6DIX7RXQC PMNWRTGJ4GVSMSSAWSUD57B26PCRAHMZIQ5SIWJIK7A74ENKEQLAC J5UVLXOK6EDIL5I7VKWH4V2QDS4DPD7FHRK6XBWSXFRQS4JKXFZQC L2E6EVE2RVFVDCUNRJ4CZYSQNS2DZUA5DTBETHBDUQUV2KQQRAOQC FTPCV25MOLQUNR5CAR453W7T7QTUZRLPLEOSDZ5HSDFAXQZVHOYQC ZIIXICG7TBYQAGTNXGZXLLPVRLZ5VU3RCDDUAD3D7PHZZON4U7MQC JM3DPYOMVNMCL5GMEYC3Y4NDRGTNIFBBFTPGPVT66GPENVPU7EVQC S5PV6IIMKJ7PGWIFLLXERHYF3BCP2UEGFRZEZLD6UUBLVEZXJLUAC W5OAZWPDZOGVAL5P4ROVZSZGCSJBMPRALW6SCGV6L7A5DBAK2JLQC X27GNHDV5KPZ5GSH6DCAJMNCEMZLCP7M43JWF2X3O5QWXMOX273AC ZWCTAZGLJZQNTYWTC2XQUKMILJF6JGDL5IND6QNYWK4FIGMLRFXAC Y6AHH4THYQA43V77L43YM42DYRPCMDSWLUV4NKWAQYMPL4NTUIPQC 7ECJWNVXNO3BKM7B7FIFIRBE77QET5PK2C3XKVQUXCYKHDP3V4UQC JIJDYWPYMZZNFBCWYSYR72RNEW5MSI26MJ5UXXIU7DDX25IBR3CQC D7PL2VWUCRIFGFPFXTB466NIUYU4FECAQPHF4TDFVMOK23VEHDEAC 2GUAKGTBTNFFER343SQWSLFYIXXHJLDSGH5JHF7QMC3AVZB7Q3TQC IK53RV4VGOHLCZGQCCIKPB45M3C7M7YMNBOJFBGZJ4LWIZNU4QNQC XAJFR6SR6Q56FCL3VYQTP534HD7JFZAMCYY376Y7DVZY4BXM2UWQC ODNCGFQ5FPKFI624BVMLW7PJ2EFJOR3TY66OCZM42UNNTWBCF2TQC HQ54SEMS5XMJN6R3FHKFFXLPV7TZQSEB37MDGP4EEN2KY5JEG7BAC N22GPKYTOLZLBGTGDATQDVZ4R5APZEAOIA7L32X4UXBH4XNI7MWAC 
6BLUKEQ2M5RGWMPXPYIFIEVEUBV4PYAZ75S2WSBIATMRGYFMQZHQC 6QRHXIM3XHCDLSIIBEGETDV67V6LTV55QMHC64ZPBMLTAECM5N3QC static std::tuple<bool, string> secureRead(Path fileName){auto fail = std::make_tuple(false, "");if (!pathExists(fileName)) return fail;try {/* For security, resolve symlinks. */fileName = canonPath(fileName, true);if (!isInStore(fileName)) return fail;return std::make_tuple(true, readFile(fileName));} catch (Error & e) { return fail; }}
/* NOTE(review): fragment — the enclosing per-output loop and the loop body are
   outside this view.  This appears to be the older inline version of the
   "canonicalise, check it is in the store, then read" sequence that
   secureRead() encapsulates — TODO confirm against the full file. */
/* For security, resolve symlinks. */try {productsFile = canonPath(productsFile, true);} catch (Error & e) { continue; }if (!isInStore(productsFile)) continue;string contents;try {contents = readFile(productsFile);} catch (Error & e) { continue; }for (auto & line : tokenizeString<Strings>(contents, "\n")) {
/* NOTE(review): lone loop-header fragment; `file` is presumably the
   (ok, contents) tuple returned by secureRead() — the loop body and
   enclosing scope are outside this view. */
for (auto & line : tokenizeString<Strings>(std::get<1>(file), "\n")) {
/* Get metrics.  Each output may provide $out/nix-support/hydra-metrics
   with one metric per line: "<name> <value> [<unit>]". */
for (auto & output : outputs) {
    auto file = secureRead(output + "/nix-support/hydra-metrics");
    if (!std::get<0>(file)) continue; /* no metrics file for this output */
    for (auto & line : tokenizeString<Strings>(std::get<1>(file), "\n")) {
        auto fields = tokenizeString<std::vector<std::string>>(line);
        if (fields.size() < 2) continue;
        BuildMetric metric;
        metric.name = fields[0];
        /* Validate the name (resolves the FIXME): it ends up in URLs
           and in the database, so restrict it to a safe character
           set and a sane length. */
        if (metric.name.empty() || metric.name.size() > 256 ||
            metric.name.find_first_not_of(
                "abcdefghijklmnopqrstuvwxyz"
                "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "0123456789._-") != std::string::npos)
            continue;
        /* Parse the value strictly (resolves the FIXME): skip lines
           whose value is not a number instead of silently recording
           the 0.0 that atof() would return. */
        char * end = nullptr;
        metric.value = strtod(fields[1].c_str(), &end);
        if (end == fields[1].c_str()) continue;
        metric.unit = fields.size() >= 3 ? fields[2] : "";
        res.metrics[metric.name] = metric;
    }
}
/* Persist every metric emitted by this build.  The project/jobset/job
   columns are denormalised copies from the build; an empty unit is
   stored as SQL null. */
for (auto & m : res.metrics) {
    auto & metric = m.second;
    txn.parameterized(
        "insert into BuildMetrics (build, name, unit, value, project, jobset, job, timestamp) "
        "values ($1, $2, $3, $4, $5, $6, $7, $8)")
        (build->id)
        (metric.name)
        (metric.unit, metric.unit != "")
        (metric.value)
        (build->projectName)
        (build->jobsetName)
        (build->jobName)
        (build->timestamp)
        .exec();
}
/* Fetch unfinished builds newer than the last one processed.  Also
   select `timestamp`: build->timestamp is needed when BuildMetrics
   rows are inserted, so omitting it here leaves that field unset. */
auto res = txn.parameterized("select id, project, jobset, job, drvPath, maxsilent, timeout, timestamp from Builds where id > $1 and finished = 0 order by id")(lastBuildId).exec();
/* Fetch all unfinished builds added since the last one we processed,
   including the timestamp needed for BuildMetrics denormalisation. */
auto res = txn.parameterized(
    "select id, project, jobset, job, drvPath, maxsilent, timeout, timestamp "
    "from Builds where id > $1 and finished = 0 order by id")
    (lastBuildId).exec();
/* Record the build's job identity, then derive the colon-separated
   full job name from the individual parts. */
build->projectName = row["project"].as<string>();
build->jobsetName = row["jobset"].as<string>();
build->jobName = row["job"].as<string>();
build->fullJobName = build->projectName + ":" + build->jobsetName + ":" + build->jobName;
# GET /job/<project>/<jobset>/<job>/metric/<metric>
# Renders metric.tt; as JSON, returns every recorded measurement of the
# named metric for this job, ordered oldest first.
sub metric : Chained('job') PathPart('metric') Args(1) {
    my ($self, $c, $metricName) = @_;

    $c->stash->{template} = 'metric.tt';
    $c->stash->{metricName} = $metricName;

    my @metrics = $c->stash->{job}->buildmetrics->search(
        { name => $metricName },
        { order_by => "timestamp",
          columns => [ "build", "name", "timestamp", "value", "unit" ] });

    $self->status_ok($c, entity => [ map {
        { id => $_->get_column("build"),
          timestamp => $_->timestamp,
          value => $_->value,
          unit => $_->unit }
    } @metrics ]);
}
# NOTE(review): auto-generated by DBIx::Class::Schema::Loader.  Everything up
# to the "DO NOT MODIFY THIS OR ANYTHING ABOVE" md5sum marker is regenerated
# from the database schema and guarded by that checksum — do not hand-edit it.
# Only the json_hint() method after the marker is hand-written; it limits the
# columns exposed when a BuildMetrics row is serialised to JSON.
use utf8;package Hydra::Schema::BuildMetrics;# Created by DBIx::Class::Schema::Loader# DO NOT MODIFY THE FIRST PART OF THIS FILE=head1 NAMEHydra::Schema::BuildMetrics=cutuse strict;use warnings;use base 'DBIx::Class::Core';=head1 COMPONENTS LOADED=over 4=item * L<Hydra::Component::ToJSON>=back=cut__PACKAGE__->load_components("+Hydra::Component::ToJSON");=head1 TABLE: C<BuildMetrics>=cut__PACKAGE__->table("BuildMetrics");=head1 ACCESSORS=head2 builddata_type: 'integer'is_foreign_key: 1is_nullable: 0=head2 namedata_type: 'text'is_nullable: 0=head2 unitdata_type: 'text'is_nullable: 1=head2 valuedata_type: 'double precision'is_nullable: 0=head2 projectdata_type: 'text'is_foreign_key: 1is_nullable: 0=head2 jobsetdata_type: 'text'is_foreign_key: 1is_nullable: 0=head2 jobdata_type: 'text'is_foreign_key: 1is_nullable: 0=head2 timestampdata_type: 'integer'is_nullable: 0=cut__PACKAGE__->add_columns("build",{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },"name",{ data_type => "text", is_nullable => 0 },"unit",{ data_type => "text", is_nullable => 1 },"value",{ data_type => "double precision", is_nullable => 0 },"project",{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },"jobset",{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },"job",{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },"timestamp",{ data_type => "integer", is_nullable => 0 },);=head1 PRIMARY KEY=over 4=item * L</build>=item * L</name>=back=cut__PACKAGE__->set_primary_key("build", "name");=head1 RELATIONS=head2 buildType: belongs_toRelated object: L<Hydra::Schema::Builds>=cut__PACKAGE__->belongs_to("build","Hydra::Schema::Builds",{ id => "build" },{ is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },);=head2 jobType: belongs_toRelated object: L<Hydra::Schema::Jobs>=cut__PACKAGE__->belongs_to("job","Hydra::Schema::Jobs",{ jobset => "jobset", name => "job", project => "project" },{ is_deferrable => 0, on_delete => "NO ACTION", on_update => 
"CASCADE" },);=head2 jobsetType: belongs_toRelated object: L<Hydra::Schema::Jobsets>=cut__PACKAGE__->belongs_to("jobset","Hydra::Schema::Jobsets",{ name => "jobset", project => "project" },{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },);=head2 projectType: belongs_toRelated object: L<Hydra::Schema::Projects>=cut__PACKAGE__->belongs_to("project","Hydra::Schema::Projects",{ name => "project" },{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },);# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qoPm5/le+sVHigW4Dmum2Qsub json_hint {return { columns => ['value', 'unit'] };}1;
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:03:55# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:EwxiaQpqbdzI9RvU0uUtLQ
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Y2lDtgY8EBLOuCHAI8fWRQ
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-09-29 19:41:42# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:lnZSd0gDXgLk8WQeAFqByA
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:vDAo9bzLca+QWfhOb9OLMg
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-04-23 23:13:51# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CO0aE+jrjB+UrwGRzWZLlw
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Coci9FdBAvUO9T3st2NEqA
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-04-23 23:13:08# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:fkd9ruEoVSBGIktmAj4u4g
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:67kWIE0IGmEJTvOIATAKaw
[%# Metrics table on the build page.  Every metric field comes from the
    build's $out/nix-support/hydra-metrics file and is therefore
    build-controlled input: escape value and unit as well as the name
    (the original escaped only the name, allowing stored XSS via the
    unit/value fields). %]
[% IF build.finished && build.buildmetrics %]
<h3>Metrics</h3>
<table class="table table-small table-striped table-hover clickable-rows">
  <thead>
    <tr><th>Name</th><th>Value</th></tr>
  </thead>
  <tbody>
    [% FOREACH metric IN build.buildmetrics %]
      <tr>
        <td><tt><a class="row-link" href="[% c.uri_for('/job' project.name jobset.name job.name 'metric' metric.name) %]">[% HTML.escape(metric.name) %]</a></tt></td>
        <td>[% HTML.escape(metric.value) %] [% HTML.escape(metric.unit) %]</td>
      </tr>
    [% END %]
  </tbody>
</table>
[% END %]
[%# Page for a single job metric: renders a flot time-series chart whose data
    is fetched (as JSON) from this page's own URL via createChart. %]
[% WRAPPER layout.tt title="Job metric ‘$metricName’" %][% PROCESS common.tt %][% INCLUDE includeFlot %][% INCLUDE createChart id="chart" dataUrl=c.req.uri %][% END %]
-- NOTE(review): fragment — the leading "create table BuildMetrics (build ...,
-- name ..., unit ..., value ..." column definitions are outside this view;
-- only the denormalised columns and the table's constraints are visible here.
-- Denormalisation for performance: copy some columns from the-- corresponding build.project text not null,jobset text not null,job text not null,timestamp integer not null,primary key (build, name),foreign key (build) references Builds(id) on delete cascade,foreign key (project) references Projects(name) on update cascade,foreign key (project, jobset) references Jobsets(project, name) on update cascade,foreign key (project, jobset, job) references Jobs(project, jobset, name) on update cascade);
-- One row per (build, metric name) pair, emitted by a build via
-- $out/nix-support/hydra-metrics.
create table BuildMetrics (
    build       integer not null,
    name        text not null,
    unit        text,
    value       double precision not null,

    -- Denormalisation for performance: copy some columns from the
    -- corresponding build.
    project     text not null,
    jobset      text not null,
    job         text not null,
    timestamp   integer not null,

    primary key (build, name),
    foreign key (build) references Builds(id) on delete cascade,
    foreign key (project) references Projects(name) on update cascade,
    foreign key (project, jobset) references Jobsets(project, name) on update cascade,
    foreign key (project, jobset, job) references Jobs(project, jobset, name) on update cascade
);

-- Supports the per-job time-series queries behind the metric charts.
create index IndexBuildMetricsOnJobTimestamp on BuildMetrics(project, jobset, job, timestamp desc);