package Gsg::Html;
use strict;
use warnings;
use Log::Log4perl qw(:easy);
use lib "/usr/local/lib";
use Shellex::Shellex qw(shellex findBin);
use Gsg::Gather qw(get_file_tree);
use Gsg::MdParse qw (render_readme);
use Exporter qw(import);
our @EXPORT_OK = qw(
write_file append_file write_root_index clean_web_root
write_project_content
);
# NOTE: the file-writing helpers below might belong in Shellex instead
# TODO: pass the logger into the write/append helpers for error reporting
# Write $content to $path, creating the file or truncating any existing one.
# Dies (with the OS error) if the file cannot be opened or closed.
#   $content - string to write
#   $path    - destination file path
# Prototype removed: ($$) only altered call parsing, it never validated args.
sub write_file {
    my ($content, $path) = @_;
    open(my $fh, ">", $path) or die "Couldnt open $path: $!\n";
    print $fh $content;
    # Check close on a write handle: buffered write errors surface here.
    close($fh) or die "Couldnt close $path: $!\n";
}
# Append $content to $path, creating the file if it does not exist.
# Dies (with the OS error) if the file cannot be opened or closed.
#   $content - string to append
#   $path    - destination file path
# Prototype removed: ($$) only altered call parsing, it never validated args.
sub append_file {
    my ($content, $path) = @_;
    open(my $fh, ">>", $path) or die "Couldnt open $path: $!\n";
    print $fh $content;
    # Check close on a write handle: buffered write errors surface here.
    close($fh) or die "Couldnt close $path: $!\n";
}
# Build the root index page listing every project, and create the matching
# per-project directory under the web root.
#   $index                 - path of the root index file to (re)write
#   $project_dirs_ref      - arrayref of project directory names (each assumed
#                            to end in "/" given the path concatenation below
#                            -- TODO confirm against caller)
#   $web_projects_dir_path - web root directory the per-project dirs live under
#   $logger                - Log::Log4perl logger object
# Prototype removed: ($$$$) only altered call parsing, never validated args.
sub write_root_index {
    my ($index, $project_dirs_ref, $web_projects_dir_path, $logger) = @_;
    # Truncate/create the index, then build the page up with appends.
    write_file("", $index);
    append_file("
Git Projects
\n",$index);
    append_file("Statically generated web root for browsing this git server
",$index);
    append_file("This is a read-only site and does not provide a merge/clone interface
",$index);
    my $mkdirCmd = findBin("mkdir",$logger);
    foreach my $project ( @$project_dirs_ref ) {
        # (Dead local removed: an unused "$project . 'index.html'" was computed
        # here on every iteration.)
        append_file("",$index);
        append_file("$project | ",$index);
        # NOTE(review): $project is interpolated straight into a shell command
        # line; a name containing spaces or shell metacharacters would break or
        # inject -- confirm Shellex::shellex quotes its argument, or sanitize.
        shellex("$mkdirCmd -p $web_projects_dir_path$project",$logger);
    }
    append_file("
",$index);
    $logger->info("Wrote root index at $index");
}
# Main sub for generating project page
# Might make more sense to split into more subs?
sub write_project_content($$$$) {
my $project_dirs_ref = shift;
my $trimmed_project_dirs_ref = shift;
my $web_projects_dir = shift;
my $logger = shift;
# Make these array's easier to work with in a hash
# Key is path to actual git dir, val is path to associated web dir
my %projects_map;
@projects_map{@$project_dirs_ref} = @$trimmed_project_dirs_ref;
$logger->info("Assembling data structures of git info");
# Write files part of project index
foreach my $project_path ( keys %projects_map ) {
my $spec_web_dir = $web_projects_dir . $projects_map{$project_path};
my $project_index = $spec_web_dir . "index.html";
write_file("",$project_index);
append_file("Return to index
",$project_index);
# Get all project data structures/info
my ( $file_tree_ref, $file_content_ref, $commits_ref, $commit_ids_ref ) = get_file_tree($project_path,$logger);
# Handle README
if ( grep /^README.md$/, keys %$file_content_ref ) {
$logger->info("$projects_map{$project_path} contains a README");
my $readme_html = render_readme(${$file_content_ref}{'README.md'},$logger);
append_file("$readme_html",$project_index);
}
append_file("Files for $projects_map{$project_path}
",$project_index);
append_file("
",$project_index);
## Write files ##
append_file("File | Commit |
",$project_index);
foreach my $filename ( sort keys %$file_content_ref ) {
my $browserCompat = $filename . ".txt";
# Rewrite dir paths so we can save on disk without producing actual dir structure
if ( $filename =~ m/\// ) {
my $copy = $filename;
$copy =~ s/\//_/g;
$browserCompat = $copy . ".txt";
}
append_file("$filename | ${$file_tree_ref}{$filename} | ",$project_index);
write_file("${$file_content_ref}{$filename}",$spec_web_dir . $browserCompat);
}
append_file("
",$project_index);
append_file("
", $project_index);
append_file("Logs for $projects_map{$project_path}
",$project_index);
append_file("",$project_index);
append_file("
",$project_index);
# iterate over array to keep ordering
foreach my $commit_id ( @$commit_ids_ref ) {
my $filename = $commit_id . ".txt";
append_file("$filename | ",$project_index);
write_file(${$commits_ref}{$commit_id},$spec_web_dir . $filename);
}
append_file("
",$project_index);
append_file("