package Gsg::Html;
use strict;
use warnings;
use Log::Log4perl qw(:easy);
use lib "/usr/local/lib";
use Shellex::Shellex qw(shellex findBin);
use Gsg::Gather qw(get_file_tree);
use Gsg::MdParse qw(render_readme);
use Exporter qw(import);
our @EXPORT_OK = qw(
    write_file append_file write_root_index clean_web_root
    write_project_content
);
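# A minimal usage sketch (a hypothetical driver, not part of this module;
# the real gsg entry point may wire these up differently):
#
#   use Gsg::Html qw(write_root_index write_project_content);
#   use Log::Log4perl qw(:easy);
#   Log::Log4perl->easy_init($INFO);
#   my $logger = get_logger();
#   my @projects = ( "myproject.git/" );
#   write_root_index("/var/www/html/index.html", \@projects,
#       "/var/www/html/projects/", $logger);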
# These subs might belong in shellex
# Add logger for write opts TODO
sub write_file($$) {
    my $content = shift;
    my $path = shift;
    open(my $fh, ">", $path) or die "Couldn't open $path: $!\n";
    print $fh "$content";
    close $fh;
}
sub append_file($$) {
    my $content = shift;
    my $path = shift;
    open(my $fh, ">>", $path) or die "Couldn't open $path: $!\n";
    print $fh "$content";
    close $fh;
}
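# Note that append_file reopens and closes its target on every call, so the
# page builders below pay one open() per appended fragment. A possible
# alternative (a sketch, not what this module does) is to accumulate each
# page in a scalar and write it out once:
#
#   my $html = "<html>";
#   $html .= "<body>...</body>";
#   $html .= "</html>";
#   write_file($html, $path);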
sub write_root_index($$$$) {
    my $index = shift;
    my $project_dirs_ref = shift;
    my $web_projects_dir_path = shift;
    my $logger = shift;
    write_file("", $index);
    append_file("<html><head><META NAME=\"ROBOTS\" CONTENT=\"NOINDEX, NOFOLLOW\"></head><body><b>Git Projects</b><br>\n",$index);
    append_file("<small><i>Statically generated web root for browsing this git server</i></small><br>",$index);
    append_file("<small><i>This is a read-only site and does not provide a merge/clone interface</i></small><hr/>",$index);
    my $mkdirCmd = findBin("mkdir",$logger);
    # Open the index table once, then emit one row per project
    append_file("<div id=\"content\"><table id=\"index\"><tbody>",$index);
    foreach my $project ( @$project_dirs_ref ) {
        # $project is expected to carry its trailing slash
        my $indexPath = $project . "index.html";
        append_file("<tr><td><a href=\"projects/$indexPath\">$project</a></td></tr>",$index);
        shellex("$mkdirCmd -p $web_projects_dir_path$project",$logger);
    }
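    # The "mkdir -p" in the loop above could alternatively use core
    # File::Path, which avoids shelling out entirely (a sketch, not what
    # this module currently does):
    #   use File::Path qw(make_path);
    #   make_path("$web_projects_dir_path$project");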
- append_file("</tr></tbody></table></div></body></html>",$index);
- $logger->info("Wrote root index at $index");
- }
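# Project and file names are interpolated into the HTML above verbatim, so
# a name containing "&", "<", or ">" would corrupt the markup. A minimal
# escaping helper could look like this (a sketch; nothing in this module
# calls it yet):
sub html_escape {
    my $text = shift;
    $text =~ s/&/&amp;/g;   # must run first so later entities survive
    $text =~ s/</&lt;/g;
    $text =~ s/>/&gt;/g;
    $text =~ s/"/&quot;/g;
    return $text;
}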
# Main sub for generating project pages
# Might make more sense to split into more subs?
sub write_project_content($$$$) {
    my $project_dirs_ref = shift;
    my $trimmed_project_dirs_ref = shift;
    my $web_projects_dir = shift;
    my $logger = shift;
    # Make these arrays easier to work with in a hash:
    # key is the path to the actual git dir, value is the path to the
    # associated web dir
    my %projects_map;
    @projects_map{@$project_dirs_ref} = @$trimmed_project_dirs_ref;
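    # The hash slice above pairs the two arrays element-for-element,
    # e.g. (paths here are illustrative):
    #   @dirs    = ("/srv/git/foo.git/", "/srv/git/bar.git/");
    #   @trimmed = ("foo.git/", "bar.git/");
    #   @map{@dirs} = @trimmed;  # $map{"/srv/git/foo.git/"} is "foo.git/"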
- $logger->info("Assembling data structures of git info");
- # Write files part of project index
- foreach my $project_path ( keys %projects_map ) {
- my $spec_web_dir = $web_projects_dir . $projects_map{$project_path};
- my $project_index = $spec_web_dir . "index.html";
- write_file("",$project_index);
- append_file("<html><a href=\"../../index.html\">Return to index</a></b><hr/>",$project_index);
- # Get all project data structures/info
- my ( $file_tree_ref, $file_content_ref, $commits_ref, $commit_ids_ref ) = get_file_tree($project_path,$logger);
- # Handle README
- if ( grep /^README.md$/, keys %$file_content_ref ) {
- $logger->info("$projects_map{$project_path} contains a README");
- my $readme_html = render_readme(${$file_content_ref}{'README.md'},$logger);
- append_file("$readme_html",$project_index);
- }
- append_file("<b>Files for $projects_map{$project_path}</b><br>",$project_index);
- append_file("<hr/>",$project_index);
        ## Write files ##
        append_file("<div id=\"content\"><table id=\"index\"><thead><tr><td><b>File</b></td><td><b>Commit</b></td></tr></thead><tbody>",$project_index);
        foreach my $filename ( sort keys %$file_content_ref ) {
            my $browserCompat = $filename . ".txt";
            # Rewrite dir paths so we can save on disk without producing
            # an actual dir structure, e.g. "src/main.c" -> "src_main.c.txt"
            if ( $filename =~ m/\// ) {
                my $copy = $filename;
                $copy =~ s/\//_/g;
                $browserCompat = $copy . ".txt";
            }
            append_file("<tr><td><a href=\"$browserCompat\">$filename</a></td><td>${$file_tree_ref}{$filename}</td></tr>",$project_index);
            write_file("${$file_content_ref}{$filename}",$spec_web_dir . $browserCompat);
        }
- append_file("</tr></tbody></table></div></body>",$project_index);
- append_file("<br>", $project_index);
- append_file("<html><b>Logs for $projects_map{$project_path}</b><br>",$project_index);
- append_file("<table><div id=\"cotent\"><table id=\"index\"><tbody>",$project_index);
- append_file("<hr/>",$project_index);
- # iterate over array to keep ordering
- foreach my $commit_id ( @$commit_ids_ref ) {
- my $filename = $commit_id . ".txt";
- append_file("<tr><td><a href=\"$filename\">$filename</a></td>",$project_index);
- write_file(${$commits_ref}{$commit_id},$spec_web_dir . $filename);
- }
- append_file("</tr></tbody></table></div></body>",$project_index);
- append_file("</html>",$project_index);
- }
- $logger->info("Done writing files");
- }
# Not used currently, need to do more trimming/etc
# TODO
# Workaround is to rm -rf the webroot manually and then just rerun gsg
sub clean_web_root($$$) {
    my $web_projects_dir_path = shift;
    my $git_projects_ref = shift;
    my $logger = shift;
    my $lsCmd = findBin("ls",$logger);
    my $rmCmd = findBin("rm",$logger);
    foreach my $dir ( split("\n", shellex("$lsCmd -d $web_projects_dir_path/*/",$logger)) ) {
        # NOTE: $dir comes back as a full path here, so it still needs
        # trimming before it can be compared against the git project names
        # or safely appended to the webroot path below
        if ( ! grep( /^\Q$dir\E$/, @$git_projects_ref ) ) {
            $logger->info("Found $dir in webroot but not in git root, removing...");
            my $rmdir = $web_projects_dir_path . $dir;
            $logger->info("Would remove $rmdir");
            # Does this need to be safer? TODO
            #shellex("$rmCmd $rmdir/*",$logger);
            #shellex("$rmCmd -d $rmdir",$logger);
        }
    }
}
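# Re the safety TODO above: one option is core File::Path's remove_tree
# instead of shelling out to rm, which sidesteps shell quoting entirely
# (a sketch, assuming $rmdir has been validated to sit inside the webroot):
#
#   use File::Path qw(remove_tree);
#   remove_tree($rmdir, { safe => 1, error => \my $errors });
#   $logger->warn("Failed to remove $rmdir") if $errors && @$errors;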
1;