#!/usr/bin/env python
# encoding: utf-8
"""hgsite
Create and/or upload a static copy of the repository.
The main goal is sharing Mercurial on servers with only FTP access and
statically served files, while providing the same information as hg
serve and full solutions like bitbucket and gitorious (naturally
without the interactivity).
"""
__copyright__ = """Copyright 2012 till 2013 Arne Babenhauserheide
This software may be used and distributed according to the terms of the
GNU General Public License version 2 or any later version.
"""
import os
import shutil
import re
import mercurial
import ftplib
import socket
import datetime
from mercurial import cmdutil, util, scmutil
from mercurial import commands, dispatch
from mercurial.i18n import _
from mercurial import hg, discovery, util, extensions
_staticidentifier = ".statichgrepo"
_freenetprivkeystring = "AQECAAE/"
templates = {
"head": """<!DOCTYPE html>
<html><head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <!--duplicate for older browsers-->
<link href='http://fonts.googleapis.com/css?family=OFL+Sorts+Mill+Goudy+TT:regular,italic' rel='stylesheet' type='text/css' />
<link rel="stylesheet" href="{relpath}style.css" type="text/css" media="screen" />
<link rel="stylesheet" href="{relpath}print.css" type="text/css" media="print" />
<title>{title}</title>
</head>
<body>
<div class="wrap">
<div class="top group">
<header id="maintitle">{reponame}</header>
<nav>{nav}</nav>
</div>
<div class="hr"> </div>
<div class="content">
""",
"srchead": """<!DOCTYPE html>
<html><head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <!--duplicate for older browsers-->
<link rel="stylesheet" href="../../style.css" type="text/css" media="screen" />
<link rel="stylesheet" href="../../print.css" type="text/css" media="print" />
<link rel="stylesheet" href="../../prettify.css" type="text/css" />
<script type="text/javascript" src="../../prettify.js"></script>
<title>{filetitle}</title>
</head>
<body onload="prettyPrint()">
<div class="wrap">
<div class="content">
""",
"forkhead": """<!DOCTYPE html>
<html><head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <!--duplicate for older browsers-->
<link href='http://fonts.googleapis.com/css?family=OFL+Sorts+Mill+Goudy+TT:regular,italic' rel='stylesheet' type='text/css' />
<link rel="stylesheet" href="{relpath}style.css" type="text/css" media="screen" />
<link rel="stylesheet" href="{relpath}print.css" type="text/css" media="print" />
<title>{forkname}</title>
</head>
<body>
<div class="wrap">
<div class="top group fullwidth">
<header>{forkname} <small>(fork of <a href="../../">{reponame}</a>, found at {forkuri})</small></header>
</div>
<div class="hr"> </div>
<div class="content">
""",
"foot": """<div class="hrb"> </div><footer>
<p>
Site made with the <a href="http://draketo.de/proj/hgsite">hg staticsite extension</a>,
Design based on the site from <a href="https://stevelosh.com">Steve Losh</a>
</p>
</footer></div><!-- /content --></div><!-- /wrap--></body></html>\n""",
# this screenstyle is mostly from the wonderful site http://stevelosh.com and licensed under MIT
"screenstyle": """
.bugnumbers {
font-size: x-small;
vertical-align: super;
}
.openbugnumber, .openbugnumber a {
color: #f00;
text-decoration: none;
}
.resolvedbugnumber, .resolvedbugnumber a {
color: #00f;
text-decoration: none;
}
/* Copyright (c) 2008-2010 Steve Losh
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.*/
html,body,div,span,applet,object,iframe,h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,acronym,address,big,cite,code,del,dfn,em,font,img,ins,kbd,q,s,samp,small,strike,strong,sub,sup,tt,var,b,u,i,center,dl,dt,dd,ol,ul,li,fieldset,form,label,legend,table,caption,tbody,tfoot,thead,tr,th,td,header,nav,section,article,aside,footer{
border:0;margin:0;outline:0;padding:0;background:transparent;vertical-align:baseline;
}
article,aside,details,figcaption,figure,footer,header,hgroup,menu,nav,section{
display:block;
}
blockquote,q{
quotes:none;
}
blockquote:before,blockquote:after,q:before,q:after{
content:'';content:none;
}
header,nav,section,article,aside,footer{
display:block;
}
html{
overflow-y:scroll;
}
body{
background:#fdfdfd;color:#353535;font:normal 18px/25px Palatino,"Palatino Linotype",serif;text-rendering:optimizeLegibility;
}
html>body{
font-size:18px;line-height:25px;
}
img{
display:inline-block;vertical-align:bottom;
}
h1,h2,h3,h4,h5,h6,strong,b,dt,th{
font-weight:700;
}
address,cite,em,i,caption,dfn,var{
font-style:italic;
}
h1{
font-size:45px;line-height:50px;margin:25px 0;
}
h2{
font-size:32px;line-height:50px;margin:25px 0;
}
h3{
font-size:23px;line-height:25px;margin:25px 0;
}
h4{
margin:0 0 22px;font-size:16px;line-height:22px;
}
h5{
margin:0 0 22px;font-size:14px;line-height:22px;
}
h6{
margin:0 0 22px;font-size:12px;line-height:22px;
}
p,ul,ol,dl,blockquote,pre{
margin:0 0 25px;
}
li ul,li ol{
margin:0;
}
ul{
list-style:outside disc;
}
ol{
list-style:outside decimal;
}
li{
margin:0 0 0 44px;
}
dd{
padding-left:25px;
}
blockquote{
padding:0 25px;
}
a{
text-decoration:underline;
}
a:hover{
text-decoration:none;
}
abbr,acronym{
border-bottom:1px dotted;cursor:help;
}
del{
text-decoration:line-through;
}
ins{
text-decoration:overline;
}
sub{
font-size:14px;line-height:25px;vertical-align:sub;
}
sup{
font-size:14px;line-height:25px;vertical-align:super;
}
tt,code,kbd,samp,pre{
font-size:14px;line-height:25px;font-family:Menlo,Monaco,Consolas,"Courier New",monospace;
}
table{
border-collapse:collapse;border-spacing:0;margin:0 0 1.5em;
}
caption{
text-align:left;
}
th,td{
padding:.25em .5em;
}
tbody td,tbody th{
border:1px solid #222;
}
tfoot{
font-style:italic;
}
fieldset{
clear:both;
}
legend{
padding:0 0 1.286em;font-size:1.167em;font-weight:700;
}
fieldset fieldset legend{
padding:0 0 1.5em;font-size:1em;
}
* html legend{
margin-left:-7px;
}
*+html legend{
margin-left:-7px;
}
form .field,form .buttons{
clear:both;margin:0 0 1.5em;
}
form .field label{
display:block;
}
form ul.fields li{
list-style-type:none;margin:0;
}
form ul.inline li,form ul.inline label{
display:inline;
}
form ul.inline li{
padding:0 .75em 0 0;
}
input.radio,input.checkbox{
vertical-align:top;
}
label,button,input.submit,input.image{
cursor:pointer;
}
* html input.radio,* html input.checkbox{
vertical-align:middle;
}
*+html input.radio,*+html input.checkbox{
vertical-align:middle;
}
textarea{
overflow:auto;
}
input.text,input.password,textarea,select{
margin:0;font:1em/1.3 Helvetica,Arial,"Liberation Sans","Bitstream Vera Sans",sans-serif;vertical-align:baseline;
}
input.text,input.password,textarea{
border:1px solid #444;border-bottom-color:#666;border-right-color:#666;padding:2px;
}
* html button{
margin:0 .34em 0 0;
}
*+html button{
margin:0 .34em 0 0;
}
form.horizontal .field{
padding-left:150px;
}
form.horizontal .field label{
display:inline;float:left;width:140px;margin-left:-150px;
}
img.left{
display:inline;float:left;margin:0 1.5em .75em 0;
}
img.right{
display:inline;float:right;margin:0 0 .75em .75em;
}
.group:after{
content:".";display:block;height:0;clear:both;visibility:hidden;
}
html{
overflow-y:scroll;
}
body{
text-rendering:optimizeLegibility;color:#222222;position:relative;
}
body a{
text-decoration:none;color:#e50053;
}
body a:hover{
text-decoration:underline;
}
body h1,body h2,body h3,body h4,body h5,body h6{
font-family:'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;font-weight:normal;
}
body h1 a,body h2 a,body h3 a,body h4 a,body h5 a,body h6 a{
color:#222222;
}
body h1 a:hover,body h2 a:hover,body h3 a:hover,body h4 a:hover,body h5 a:hover,body h6 a:hover{
color:#e50053;text-decoration:none;
}
body h1 .amp,body h2 .amp,body h3 .amp,body h4 .amp,body h5 .amp,body h6 .amp{
font-style:italic;
}
body h1{
font-size:45px;line-height:50px;margin:25px 0;
}
body h2{
font-size:32px;line-height:50px;margin:25px 0;
}
body h3{
font-size:23px;line-height:25px;margin:25px 0;
}
body h4{
font-size:18px;line-height:25px;margin:25px 0;
}
body code,body pre{
font-family:Consolas,Menlo,"Courier New",monospace;font-size:14px;
}
body .codehilite code,body .codehilite pre{
font-family:Consolas,Menlo,"Courier New",monospace;font-size:16px;line-height:25px;overflow-x:auto;border:1px solid #d5d5d5;border-left:10px solid #d5d5d5;background-color:#fafafa;padding:11px 15px 12px;margin-left:-25px;
}
body .codehilite pre::-webkit-scrollbar{
height:25px;
}
body .codehilite pre::-webkit-scrollbar-button:start,body .codehilite pre::-webkit-scrollbar-button:end{
display:none;
}
body .codehilite pre::-webkit-scrollbar-track-piece{
background-color:#eee;
}
body .codehilite pre::-webkit-scrollbar-thumb{
background-color:#bbb;border:7px solid #eee;-webkit-background-clip:padding-box;-webkit-border-radius:12px;
}
body p code,body li code,body table code{
border:1px solid #ccc;background-color:#fafafa;font-size:13px;padding:1px 3px;line-height:20px;margin:0;white-space:nowrap;
}
body .wrap{
width:612px;margin:0 auto;margin-bottom:5em;
}
body .wrap .top header{
float:left;font-family:'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;font-size:23px;line-height:50px;text-transform:lowercase;padding-left:2px;width:210px;
}
body .wrap .top .fullwidth header{
float:left;font-family:'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;font-size:23px;line-height:50px;text-transform:lowercase;padding-left:2px;width:100%;
}
body .wrap .top header .amp{
font-style:italic;
}
body .wrap .top header a{
color:#222222;
}
body .wrap .top header a:hover{
color:#e50053;
}
body .wrap .top nav{
/* reduced font size and/or line height compared to steve losh (18px/50px),
because we can have a long lis tof forks and use <sup> elements for the bugs.*/
font:normal 18px/35px 'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;text-align:right;text-transform:lowercase;padding-right:2px;width:398px;float:left;
}
body .wrap .top nav .sep{
padding:0 4px;color:#666;
}
body .wrap .hr{
margin-top:-12px;margin-bottom:37px;height:25px;background:transparent url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAmQAAAAZCAIAAAAQUKXdAAADAklEQVR42u3c3UtTcRzHcf+nIOquu266TIICsceLLpKISKPQqOxGgkgjioouyrIUzbJI0xKV0CwpdW7HTQ+6ubNH3dzc3JQ+emSu0a1u4PvlkMnYLg7i2+/5/c4pWwNKW1ZfAFBUZRwClLi2n4+i8VAmk+FQACCWwP/Gymy2oacqGgun02k954AAIJZAIQ2UN7tOxePxZDLJcAmAWAKFGnouDLt7nwzULy0tKZZMlgCIJVDofMuhM80Hfhj9iuXKygqxBEAsgbX19fXcc6XxRlflpdYjVsBSLLVmyfEBQCyx1zNpUyW/ONqcC2OqY1Pf1ebvjcFgUGuWLFgCIJaglHYp14amP518se/sq4OGd+Lht1rX3MTi4mIqleIcLABiCWK5Fcs3ow8USz1qOyvudl+MRCLLy8urq6scJQDEEpRyK5ZPh+rtWOrx+VdrOBxmaw+A4sdSC0IOYFdMbXI6nS6XyzAMt9s9Ozs7Pz/vtyxFUQuTiUTi9XCjXcq6d5V61efzBQIBv9/v9XpN0/R4PHqj3q4PsT+NowpgpymUTJYorcmyc+y5Snn65f6RyUFlUvm0x8r8jbIAsNuTpRaE/MCusSw9RCHUP2saKKPRaCwWUxS1i0c7YFtGmhTLx19va+LUq/r91EsaOrXNRz+GQiG90dr8kI0HAOw8/SFiskTxJ0t9T2dS0URIQ+SzwTtVbw/PzLnzVytzcycAFGey5BCgBGKZvf7+hG7W88ccruko7/7dpgkytwk2/yQtABBL7FHB2IK9qUcz5a2ucyqlfW1lrqaUEgCxxF7nCUzZsaxpP2aYTp2A1TqlfcsexkoAxBLYMGB8VCmr28sd7nFt3tF2Hk7AAiCWwDZt4bnfd6W646jLdGiDa/4908kkAGIJbJj2j1/rPD7jM3QliUrJbWABEEvgH5O+0boPFXMBj321JaUEQCyBbeG4pdvA3uu97I/4NFDqQhGdfaWUAIglsG3U7B/x9GnXq337HkoJgFgChXRliE66JpNJ3aaHUgIocX8BBfw+1JXTYZkAAAAASUVORK5CYII=')/*../images/hr.png*/ top left no-repeat;
}
body .wrap .hrb{
height:25px;margin-top:50px;background:transparent url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAmQAAAAZCAIAAAAQUKXdAAADMklEQVR42u3c30tTcRjHcf+nIOquu266TIICsZ8XXSQRkUahUdmNBGFGFBVdlGI1NMsiTUtUSjOk1LUfOqZsnrOfujnd3Mw+eiDn8iZwY2Pvl2NseLaLhwc/Pud8z7diDQCQJaMfYLsKSgAA2WzfH0TjoXQ6TSlAWALATmNlJtPUWxONhVOplF5TEBCWAJBLA+X17hPxeHx5eZnhEoQlAORq6j03Mt33aLBxcXFRYclkCcISAHKdbT9wqnXfN9eAwnJlZYWwBGGJYvR7J5QFeW25v68Vjde6qy+8OmQGTIWlrllSHxCWKIGYJC9RgK5TSn602xzz40rHlv7LrV/uBoNBXbPkgiUIS5RSUpKXyGdSrg273x9/tud0236Xb/L+53rn3OTCwkIymeQcLAhLlF5YkpfIR8vp9YuxewpLPeq7qm73nI9EIktLS6urq1QJhCWKNynXsxCWKEBYPh5utMJSjw/jtnA4zNIe7BCWOjtvBwri1yaHw+F0OqkGCt91brfb4/HMzs76/f5QKKRVPBoi2742W0nZ8Lra6/UahqG/ioFAQMfoSB2vT+mz+gbrqyhpGVJLMFmiiCbL9X8wWSKvLae3XeNPlZQnn+8dnRpSRiYSCWuspOuwbbLUP1YGUDCmqYdQCRS+6zQfaKDUVUlrptS2A1oB2z7aorB8+Ommz+fTrxKb9Fsdo7c6Xp8yN7/BoG/LlfqByRJMlii7ltNzKp2MJjZ2S38ydKvm5UHP3HT21Uq6DrmTJSVA8YQlC3xQKJmrb45ps56f3pG6zsqeHzZNkJoerHsraTkQliiBvOS+EeRbMDZvLerRTHmj+4yS0rq3MqchKRQISxCWKF8zpt0Ky7qOIy6vQydgdZ2SsRKEJUo7L6kPdteg652Ssraj0j49ocU72tzO2oWAlgNhiZKMTGqCXaclPM39l2o7Dzu9di1wzd4zna4DYQkAG9zGxJWuox6/S/eEKCnZBhaEJQBsM+Ufa3hbNReYiUajsViMpARhCQBbwnFT28De6btoRPzWpgQ6+0pSgrAEgC1j3oHRmX6tetVAqWeSEoQlAOTSnSE66aot7rRND0mJ//UHiLRAti+ZCq0AAAAASUVORK5CYII=')/*../images/hrb.png*/ top left no-repeat;
}
body .wrap footer{
text-align:center;
}
body .wrap footer p{
font-size:14px;font-style:italic;line-height:50px;margin-bottom:0px;
}
body .wrap footer .rochester-made img{
opacity:0.7;padding:5px 20px;
}
hr{
border:none;background:#ccc;height:1px;margin-bottom:24px;
}
.splash{
color:#454545;text-align:center;font:normal 27px/32px 'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;margin-bottom:-1px;padding-top:0;
}
.splash p{
margin-bottom:24px;
}
.splash .amp{
font-style:italic;
}
.splash .fn{
color:#454545;text-decoration:none;
}
.splash .fn:hover{
color:#e50053;text-decoration:none;
}
.splash .fn .last-name{
display:none;
}
.section-listing{
margin-bottom:-50px;
}
.section-listing ol{
list-style-type:none;
}
.section-listing ol li{
float:left;width:281px;height:150px;margin:0;text-align:center;
}
.section-listing ol li a{
font:normal 23px/32px 'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;color:#222222;display:block;
}
.section-listing ol li a:hover{
color:#e50053;text-decoration:none;
}
.section-listing ol li span.snip{
font-size:18px;color:#333;font-family:'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;line-height:25px;font-style:italic;
}
.section-listing ol li .amp{
font-style:italic;
}
.section-listing ol li:nth-child(odd){
margin-right:25px;
}
.section-listing ol li:nth-child(even){
margin-left:25px;
}
blockquote{
border:1px solid #ccc;background-color:#fafafa;padding:11px 15px 12px;margin-left:2em;overflow:auto;
}
blockquote p:last-child{
margin-bottom:0;
}
span.dquo{
margin-left:-0.23em;
}
#leaf-stats p{
color:#666;margin-top:-22px;margin-bottom:22px;
}
#leaf-content img{
display:block;margin:25px auto 26px;border:11px solid #e5e5e5;padding:1px;background:black;max-width:590px;
}
#leaf-content img.left,#leaf-content img.right{
border:none;background:none;padding:none;
}
#leaf-content img.left{
margin:0 1.5em 1em 0;
}
#leaf-content img.right{
margin:0 0 .75em 1em;
}
#leaf-content .gallery img{
background:none;padding:0;border:none;display:inline;margin-bottom:25px;margin-right:25px;
}
#leaf-content .toc ul{
list-style:none;
}
#leaf-content .toc ul:first-child>li{
margin-left:0em;
}
#leaf-content table{
padding:0px;margin-top:-8px;margin-bottom:25px;
}
#leaf-content table tr{
margin:0px;padding:0px;
}
#leaf-content table tr td,#leaf-content table tr th{
margin:0px;padding:5px 5px;line-height:23px;
}
#leaf-content table tr td{
border:1px solid #666;
}
#leaf-content.with-diagrams img{
display:block;margin:25px auto;padding:0;background:none;border:none;
}
.print-links{
display:none;
}
img.self{
border:none;padding:0;margin:0;margin-right:-108px;margin-top:-15px;margin-left:30px;margin-bottom:20px;
}
div#cboxCurrent{
bottom:-30px;font-size:17px;font-weight:normal;left:60px;
}
div.screenshots img{
max-width:580px;
}
#scrolling-header{
color:#999;font-size:23px;font-style:italic;font:italic 23px 'OFL Sorts Mill Goudy','OFL Sorts Mill Goudy TT',serif;line-height:30px;position:fixed;top:75px;text-align:right;width:180px;
}
.flattr{
float:right;padding-top:1px;
}
""",
# this printstyle is from the wonderful site http://stevelosh.com and licensed under MIT
"printstyle": """
/*Copyright (c) 2008-2010 Steve Losh
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.*/
body{
font:normal 10pt/1.25 Palatino,"Palatino Linotype";text-rendering:optimizeLegibility;
}
nav,header,.toc{
display:none;
}
#leaf-title a{
text-decoration:none;color:black;
}
#leaf-content ul.print-links{
display:block;font-size:1em;list-style-type:none;margin-left:0em;
}
#leaf-content ul.print-links a{
text-decoration:none;
}
#leaf-content code,#leaf-content pre{
font:normal 9pt Menlo,Monaco,Consolas,"Courier New",Courier,monospace;
}
#leaf-content img{
display:block;margin-left:auto;margin-right:auto;border:1.43em solid #e5e5e5;padding:1px;background:black;
}
#leaf-content img.left,#leaf-content img.right{
border:none;background:none;padding:none;
}
#leaf-content img.left{
margin:0 1.5em 1em 0;float:left;
}
#leaf-content img.right{
margin:0 0 .75em 1em;float:right;
}
div#leaf-content.with-diagrams img{
display:block;margin-left:auto;margin-right:auto;background:none;border:none;
}
span.amp{
font-family:"Palatino","Constantia","Palatino Linotype",serif;font-style:italic;
}
.flattr{
display:none;
}
a{
text-decoration:underline;color:#c06;
}
footer{
display:none;
}
""",
"manifesthead": """<h2>""" + _("Commit (click to see the diff)")+""": <a href='../../commit/{hex}.html'>{hex}</a></h2>
<p>{desc}</p><p>{user}</p>
<h2>""" + _("Diffstat") + """</h2>
<pre>{diffstat}</pre>
<h2>""" + _("Files in this revision") + "</h2>",
"commitlog": """\n<div style='float: right; padding-left: 0.5em'><em>({author|person})</em></div><strong> {date|shortdate}: <a href='{relativepath}src/{node}/index.html'>{desc|strip|fill68|firstline}</a></strong> <span style='font-size: xx-small'>{branches} {tags} {bookmarks}</span><p>{desc|escape}</p>\n""",
}
_indexregexp = re.compile("^\\.*index.html$")
def samefilecontent(filepath1, filepath2):
    """Check if the content of the two referenced files is equal.

    Returns False when either file cannot be read (e.g. it does not
    exist or permissions are missing).
    """
    try:
        with open(filepath1) as f1:
            with open(filepath2) as f2:
                return f1.read() == f2.read()
    # IOError is required for Python 2, where a missing file raises
    # IOError and IOError is NOT a subclass of OSError (they are only
    # aliases on Python 3) — catching only OSError crashed here.
    except (OSError, IOError):
        return False
def contentequals(filepath, content):
    """Return True when the file at filepath holds exactly content."""
    try:
        with open(filepath) as stream:
            return stream.read() == content
    except OSError:
        # unreadable file: count it as equal only to empty/falsy content
        return not content
    except IOError:
        # file does not exist: never equal (an empty file != no file)
        return False
    # TODO: check: return True if content is None?
def bisenabled():
    """Check if the b extension is enabled to decide if we want to add
    a bug listing.

    Returns an explicit boolean; the previous implementation returned
    True or fell off the end (implicit None), which worked for callers
    that only test truthiness but surprised everyone else.
    """
    return "b" in extensions.enabled()
def splitbugline(line):
    """Split a b extension bug line into the ID and the description.

    A bug line has the form "<id> - <description>".  Only the first "-"
    separates the two parts, so hyphens inside the description survive:
    the previous implementation re-joined line.split("-")[1:] WITHOUT
    the separator and mangled descriptions like "re-check the parser"
    into "recheck the parser".  (Its except IndexError was unreachable:
    split() always returns at least one element.)

    If the line contains no "-", the whole stripped line is returned as
    the id with an empty description — matching the old behaviour.
    """
    bugid, _sep, description = line.partition("-")
    return bugid.strip(), description.lstrip()
def getbugdetails(ui, repo, bugid):
    """Return the detail report for bugid, as printed by `hg b details`."""
    # Divert the command's output into a buffer instead of the terminal.
    ui.pushbuffer()
    request = dispatch.request(["b", "details", bugid], ui=ui, repo=repo)
    dispatch.dispatch(request)
    return ui.popbuffer()
def getbugfullid(details, bugid):
    """Get the real ID of a bug from its detailed info. If it’s not
    available, just give the short bugid."""
    prefix = "ID: "
    for line in details.splitlines():
        if line.startswith(prefix):
            return line[len(prefix):].strip()
    # the details contain no "ID: " line: fall back to the short id
    return bugid
class BBug(object):
    """A b-extension bug.

    Attributes:
        shortid: abbreviated bug id as shown in the `hg b` listing.
        fullid: complete bug id (falls back to shortid when unknown).
        description: one-line description of the bug.
        state: "open" or "resolved" (as passed by getbugs).
        details: full text from `hg b details` (may be empty).
    """
    def __init__(self, shortid, fullid, description, state, details=""):
        self.shortid = shortid
        self.fullid = fullid
        self.description = description
        self.state = state
        self.details = details

    def __repr__(self):
        # debug-friendly representation; not relied on by the site output
        return "BBug(%r, %r, %r, %r, %r)" % (
            self.shortid, self.fullid, self.description,
            self.state, self.details)
def getbuginfo(ui, repo, bugline):
    """Get information about a bug from its bugline.

    Returns (shortid, fullid, description, details).
    """
    # the listing line carries the short id and the free-text description
    shortid, description = splitbugline(bugline)
    # the detail report may reveal the unabbreviated id
    details = getbugdetails(ui, repo, shortid)
    fullid = getbugfullid(details, shortid)
    return shortid, fullid, description, details
def getbugs(ui, repo):
    """Get all bugs.

    Returns (openbugs, resolvedbugs) as lists of BBug; both empty when
    the b extension is not enabled.
    """
    if not bisenabled():
        return [], []

    def listing(args):
        # run a b-extension command, capture its output and keep only
        # the lines that look like bug entries ("<id> - <description>")
        ui.pushbuffer()
        dispatch.dispatch(dispatch.request(args, ui=ui, repo=repo))
        return [line for line in ui.popbuffer().splitlines() if "-" in line]

    def makebugs(buglines, state):
        # enrich each listing line with its details and wrap it in a BBug
        bugs = []
        for bugline in buglines:
            shortid, fullid, description, details = getbuginfo(ui, repo, bugline)
            bugs.append(BBug(shortid, fullid, description, state, details))
        return bugs

    # query both listings first, then resolve the per-bug details
    openlines = listing(["b"])
    resolvedlines = listing(["b", "list", "-r"])
    return makebugs(openlines, "open"), makebugs(resolvedlines, "resolved")
def parsereadme(filepath, truncated=False):
    """Parse the readme file.

    Reads the file at filepath and wraps its text in a word-wrapping
    <pre> element.  With truncated=True only the first five lines are
    kept (used for the short intro on the overview page).  The long
    style attribute was previously duplicated in both branches; it is
    now defined once.
    """
    # white-space fallbacks cover older browser prefixes
    pre = ("<pre style=\"white-space: pre-wrap; white-space: -moz-pre-wrap; "
           "white-space: -pre-wrap; white-space: -o-pre-wrap; "
           "word-wrap: break-word;\">")
    with open(filepath) as r:
        readme = r.read()
    if truncated:
        readme = "\n".join(readme.splitlines()[:5])
    return pre + readme + "</pre>"
def overviewlogstring(ui, repo, revs, template=templates["commitlog"]):
    """Get the string for a log of the given revisions for the overview page."""
    # render into a ui buffer so nothing leaks to the terminal
    ui.pushbuffer()
    templater = cmdutil.changeset_templater(ui, repo, patch=False, diffopts=None, mapfile=None, buffered=False)
    # the overview sits at the site root: no relative prefix needed
    templater.use_template(template.replace("{relativepath}", ""))
    for rev in revs:
        templater.show(repo.changectx(rev))
    return ui.popbuffer()
def writeoverview(ui, repo, target, name):
    """Create the overview page (index.html) in the target directory.

    Assembles, in order: a nav bar (changelog/files links, bug counts,
    forks with incoming/outgoing counts), a truncated readme intro, the
    most recent log entries, branch/tag/bookmark sections, and the full
    readme.  The file is only rewritten when its content changed.
    """
    ui.debug("[staticsite] writeoverview: header\n")
    overview = ""
    # start with the nav
    nav = ""
    # now the links to the log and the files.
    nav += "<a href='commits'>" + _("changelog") + "</a> - <a href='src/" + repo["tip"].hex() + "/'>" + _("files") + "</a>"
    # and the bugs
    openbugs, resolvedbugs = getbugs(ui, repo)
    if openbugs or resolvedbugs:
        nav += " - <a href=\"bugs\">" + _("bugs") + "</a>"
        if openbugs:
            # open count with a "!" marker, linked to the open section
            nav += " <span class=\"bugnumbers\">(<span class=\"openbugnumber\"><a href=\"bugs#open\">" + str(len(openbugs)) + "!</a></span> "
        else:
            nav += " <span class=\"bugnumber openbugnumberzero\">0</span>"
        # resolved count with a check mark, linked to the resolved section
        nav += "<span class=\"bugnumber resolvedbugnumber\"><a href=\"bugs#resolved\">" + str(len(resolvedbugs)) + "√</a></span>)</span>"
    # and the forks
    ui.debug("[staticsite] writenav: header: forks\n")
    forks = getforkinfo(ui, target)
    if forks:
        nav += " - " + _("forks: ")
    # one link per fork, annotated with incoming/outgoing change counts
    for forkname, forkuri in forks.items():
        ui.debug("[staticsite] writenav: fork: " + forkname + ": " + forkuri + "\n")
        ui.debug("[staticsite] writenav: forks: getforkdir\n")
        nav += "<a href='" + getforkdir(target, forkname) + "'>" + forkname + "</a> "
        ui.debug("[staticsite] writenav: forks: getincoming\n")
        incoming, fn, localother = getincoming(ui, repo, otheruri=forkuri, othername=forkname)
        nav += "<small>(" + str(len(incoming))
        ui.debug("[staticsite] writenav: forks: getoutgoing\n")
        outgoing, fn, localother = getoutgoing(ui, repo, otheruri=forkuri, othername=forkname)
        nav += "<small>↓↑</small>" + str(len(outgoing)) + ")</small> "
    # embed in the overview via the template
    overview += templates["head"].replace("{reponame}", name).replace("{title}", name).replace("{nav}", nav).replace("{relpath}", "./")
    # add a short identifier from the first line of the readme, if it
    # exists # TODO: Parse different types of readme files
    readme = name
    for f in os.listdir(repo.root):
        if f.lower().startswith("readme"):
            # keep the full readme for the bottom of the page; the
            # truncated version becomes the intro box at the top
            readme = parsereadme(os.path.join(repo.root, f))
            readme_intro = parsereadme(os.path.join(repo.root, f), truncated=True)
            overview += "<div id='intro'>"
            overview += readme_intro
            overview += "</div>"
            break
    ui.debug("[staticsite] writeoverview: shortlog\n")
    # now add the 5 most recent log entries
    # divert all following ui output to a string, so we can just use standard functions
    overview += "\n<div id='shortlog'><h2>Changes (<a href='commits'>full changelog</a>)</h2>\n"
    ui.pushbuffer()
    t = cmdutil.changeset_templater(ui, repo, patch=False, diffopts=None, mapfile=None, buffered=False)
    t.use_template(templates["commitlog"].replace("{relativepath}", ""))
    # NOTE(review): range(1, min(len, 5)) yields at most 4 entries (revs
    # -1..-4) and skips the oldest rev in small repos — "5 most recent"
    # looks off by one; confirm the intended count before changing.
    for c in range(1, min(len(repo.changelog), 5)):
        ctx = repo.changectx(str(-c))
        t.show(ctx)
    overview += ui.popbuffer()
    overview += "</div>"
    # Add branch, bookmark and tag information, if they exist.
    ui.debug("[staticsite] writeoverview: branches, tags and bookmarks\n")
    branches = []
    for branch, heads in repo.branchmap().items():
        if branch and branch != "default": # not default
            branches.extend([h for h in heads if not repo[h].closesbranch()])
    # if other branches exist, add default, too.
    if branches:
        branches.extend(repo.branchmap()["default"])
    try:
        tags = repo.tags()
    except AttributeError:
        try:
            # FIXME: For some reason this does not seem to give the tags anymore.
            tags = repo._tags
        except AttributeError:
            tags = []
    try:
        bookmarks = repo._bookmarks
    except AttributeError:
        bookmarks = []
    if branches: # add branches
        overview += "\n<div id='branches'><h2>Branches</h2>\n"
        # the XXXXX placeholder swaps {branches}/{tags} with
        # {date|shortdate} so the branch name takes the date's slot
        overview += overviewlogstring(ui, repo, branches,
            template=templates["commitlog"].replace(
                "{branches}", "XXXXX").replace(
                "{date|shortdate}", "{branch}").replace(
                "XXXXX", "{date|shortdate}").replace(
                "{tags}", "XXXXX").replace(
                "{date|shortdate}", "{tags}").replace(
                "XXXXX", "{date|shortdate}"))
        overview += "</div>"
    if len(tags) > 1:
        # more than just the implicit "tip" tag: show a tag section
        overview += "\n<div id='tags'><h2>Tags</h2>\n"
        overview += overviewlogstring(ui, repo, [tags[t] for t in tags if t != "tip"],
            template=templates["commitlog"].replace(
                "{tags}", "XXXXX").replace(
                "{date|shortdate}", "{tags}").replace(
                "XXXXX", "{date|shortdate}"))
        overview += "</div>"
    if len(bookmarks):
        overview += "\n<div id='bookmarks'><h2>Bookmarks</h2>\n"
        overview += overviewlogstring(ui, repo, bookmarks.values(),
            template=templates["commitlog"].replace(
                "{bookmarks}", "XXXXX").replace(
                "{date|shortdate}", "{bookmarks}").replace(
                "XXXXX", "{date|shortdate}"))
        overview += "</div>"
    # add the full readme
    ui.debug("[staticsite] writeoverview: readme\n")
    overview += "<div id='readme'><h2>"+_("Readme")+"</h2>\n"
    overview += readme
    overview += "</div>"
    # finish the overview
    ui.debug("[staticsite] writeoverview: footer\n")
    overview += templates["foot"]
    indexfile = os.path.join(target, "index.html")
    # avoid touching the file (and its mtime) when nothing changed
    if not contentequals(indexfile, overview):
        with open(indexfile, "w") as f:
            f.write(overview)
def writelog(ui, repo, target, name):
    """Write the full changelog, in steps of 100.

    The newest 100 commits go to <target>/commits/index.html; older
    ones to commits-100, commits-200, ... with earlier/later links
    between neighbouring pages.  Pages are only rewritten on change.
    """
    commits = os.path.join(target, "commits")
    # create the folders
    # NOTE(review): "/" here is Python-2 integer division (consistent
    # with the rest of the file); on Python 3 it yields a float and
    # range() would raise.
    if not os.path.isdir(commits):
        os.makedirs(commits)
    for i in range(len(repo.changelog)/100):
        d = commits+"-"+str(i+1)+"00"
        if not os.path.isdir(d):
            os.makedirs(d)
    # create the log files
    t = cmdutil.changeset_templater(ui, repo, patch=False, diffopts=None, mapfile=None, buffered=False)
    t.use_template(templates["commitlog"].replace("{relativepath}", "../"))
    # logs collects [filepath, pagecontent] pairs; logs[-1][-1] is the
    # content of the page currently being assembled
    logs = []
    for ck in range(len(repo.changelog)/100+1):
        ui.pushbuffer()
        if ck:
            # NOTE(review): dd is assigned but never read — leftover?
            dd = d
            di = str(ck)+"00"
            d = commits+"-"+di
            # the previous (newer) page links down to this older page
            logs[-1][-1] += "<p><a href=\"../commits-"+di+"\">earlier</a></p>"
            if ck>2:
                # the older log gets a reference to the newer one
                logs[-1][-1] += "<p><a href=\"../commits-"+str(ck-2)+"00"+"\">later</a></p>"
            elif ck>1:
                logs[-1][-1] += "<p><a href=\"../commits\">later</a></p>"
            logs.append([os.path.join(d, "index.html"), ""])
        else:
            # first page lives directly in the commits folder
            d = commits
            logs.append([os.path.join(d, "index.html"), ""])
        logs[-1][-1] += templates["head"].replace("{reponame}", "<a href='../'>"+name+"</a>").replace("{title}", name).replace("{nav}", "").replace("{relpath}", "../")
        # render this page's slice of revisions, newest first
        # (negative rev numbers count back from tip)
        for c in range(ck*100+1, min(len(repo.changelog)+1, (ck+1)*100)):
            ctx = repo.changectx(str(-c))
            t.show(ctx)
        logs[-1][-1] += ui.popbuffer()
    for filepath,data in logs:
        data += templates["foot"].replace("{reponame}", "<a href='../'>"+name+"</a>")
        # only write pages whose content actually changed
        if not contentequals(filepath,data):
            with open(filepath, "w") as f:
                f.write(data)
def getlocalother(repo, ui, otheruri, othername):
    """Get a local clone of the repo identified by uri and name within .hg/paths.
    This creates that local clone!
    """
    # The clone lives in .hg/paths/<othername>-<sha1-of-otheruri>; the
    # hash keeps clones apart when the same name later points to a
    # different uri.  An existing clone is updated via pull instead.
    name = "" if othername is None else othername
    digest = util.sha1(otheruri).hexdigest()
    localcopy = os.path.join(repo.root, ".hg", "paths", name + "-" + digest)
    # Drive mercurial through dispatch: the command line is the only
    # really stable interface.
    if os.path.isdir(localcopy):
        # NOTE(review): unlike the clone branch, this request does not
        # pass ui=ui — confirm whether that is intentional.
        req = dispatch.request(["-R", localcopy, "pull", otheruri])
    else:
        req = dispatch.request(["clone", otheruri, localcopy], ui=ui)
    dispatch.dispatch(req)
    return hg.peer(repo, {}, localcopy)
def getincoming(ui, repo, otheruri, other=None, othername=None):
    """Get incoming changes.

    Returns (chlist, cleanupfn, other): the list of incoming change
    ids, a cleanup callable to run when done with them, and the peer
    repository (None when the uri cannot be queried at all).
    """
    # Note: We cannot just use getcommonincoming and I do not yet know
    # how to use its output to get good changes. TODO: do this nicer.
    def cleanupfn():
        """non-operation cleanup function (default)."""
        pass
    # cannot do that for ftp or freenet insertion uris (freenet
    # separates insertion and retrieval by private/public key)
    isftpuri = otheruri.startswith("ftp://") or otheruri.startswith("ftps://")
    isfreenetpriv = _freenetprivkeystring in otheruri
    if isftpuri or isfreenetpriv:
        chlist = []
        return chlist, cleanupfn, other
    if not other:
        other = hg.peer(repo, {}, otheruri)
    ui.pushbuffer() # ignore ui events
    source, branches = hg.parseurl(otheruri, None)
    revs, checkout = hg.addbranchrevs(repo, other, branches, None)
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    # First try to fetch the remote changes directly as a bundle; if the
    # peer does not support that (or aborts), fall back to maintaining a
    # full local clone under .hg/paths and diff against that instead.
    try: # FIXME: This breaks on http repos!
        other, chlist, cleanupfn = hg.bundlerepo.getremotechanges(ui, repo, other,
                                                                 revs, False, False)
    except (AttributeError, util.Abort):
        other = getlocalother(repo, ui, otheruri, othername)
        other, chlist, cleanupfn = hg.bundlerepo.getremotechanges(ui, repo, other,
                                                                 revs, False, False)
    ui.popbuffer()
    return chlist, cleanupfn, other
def getoutgoing(ui, repo, otheruri, other=None, othername=None):
    """Get outgoing changes.

    Returns (chlist, cleanupfn, other), mirroring getincoming: the list
    of outgoing change ids, a no-op cleanup callable, and the peer.
    """
    def cleanupfn():
        """non-operation cleanup function (default)."""
        pass
    # cannot do that for ftp or freenet insertion uris (freenet
    # separates insertion and retrieval by private/public key)
    ui.debug("[staticsite] getoutgoing: checkkeys\n")
    isftpuri = otheruri.startswith("ftp://") or otheruri.startswith("ftps://")
    # NOTE(review): this inlines the same key string as the module-level
    # _freenetprivkeystring used by getincoming — keep them in sync.
    isfreenetpriv = "AQECAAE/" in otheruri
    if isftpuri or isfreenetpriv:
        chlist = []
        return chlist, cleanupfn, other
    if not other:
        ui.debug("[staticsite] getoutgoing: findpeer\n")
        other = hg.peer(repo, {}, otheruri)
    def outgoingchanges(repo, other):
        """Changes in repo that the peer does not have."""
        from mercurial import discovery
        fco = discovery.findcommonoutgoing
        og = fco(repo, other, force=True)
        # print dir(og), og.missingheads, og.missing
        ui.debug("[staticsite] getoutgoing: outgoingchanges: og.missing\n")
        try:
            return og.missing
        except AttributeError: # old client
            # old API returned a (common, outheads) tuple instead
            common, outheads = og
            o = repo.changelog.findmissing(common=common, heads=outheads)
            return o
        except Exception as e:
            # NOTE(review): this branch returns None implicitly, which
            # would make chlist None at the caller — confirm intended.
            ui.warn("Cannot parse parts of your local history. Likely your local repository is broken. You might be able to fix it by getting a fresh clone and pulling from your current repo. The Error was: " + str(e) + "\n")
    other.ui.pushbuffer() # ignore ui events
    ui.debug("[staticsite] getoutgoing: outgoingchanges\n")
    # Try against the peer directly; on failure fall back to a local
    # clone under .hg/paths (same strategy as getincoming).
    try:
        chlist = outgoingchanges(repo, other)
    except (AttributeError, util.Abort):
        other.ui.popbuffer()
        ui.debug("[staticsite] getoutgoing: local other\n")
        other = getlocalother(repo, ui, otheruri, othername)
        other.ui.pushbuffer()
        ui.debug("[staticsite] getoutgoing: outgoingchanges, local\n")
        try:
            chlist = outgoingchanges(repo, other)
        except Exception as e: # no changes
            ui.debug("[staticsite] getoutgoing: outgoingchanges, errer: " + str(e) + "\n")
            # NOTE(review): the raise makes the following chlist = []
            # unreachable and propagates the error despite the
            # "no changes" comment — likely a debugging leftover;
            # confirm which behaviour is intended before removing.
            raise
            chlist = []
    ui.debug("[staticsite] getoutgoing: popbuffer\n")
    other.ui.popbuffer()
    ui.debug("[staticsite] getoutgoing: done\n")
    return chlist, cleanupfn, other
def getforkinfo(ui, target):
    """Name and Uri of all forks.

    Reads the [paths] section of the configuration and returns a dict
    mapping fork name to fork uri, leaving out the path that points at
    the static site target itself.
    """
    targetpath = os.path.abspath(target)
    forkinfo = {}
    for name, uri in dict(ui.configitems("paths")).items():
        # the static repo is not a fork of itself
        if os.path.abspath(uri) == targetpath:
            continue
        forkinfo[name] = uri
    return forkinfo
def safeuri(uri):
    """Shareable uris: Hide password + hide freenet insert keys."""
    uri = util.hidepassword(uri)
    if "USK@" in uri and _freenetprivkeystring in uri:
        # mask the secret key part: keep everything from the final
        # "/" of the private-key marker onwards (hence the -1).
        keep = uri.index(_freenetprivkeystring) + len(_freenetprivkeystring) - 1
        uri = "freenet://USK@******" + uri[keep:]
    return uri
def getforkdata(ui, repo, target, name, forkname, forkuri):
    """Build and return the html page for a single fork: the templated
    fork header followed by the incoming and outgoing commit logs.

    Also writes commit pages and source pages for incoming changes as
    a side effect (so the log links resolve).
    """
    # make sure the forkdir exists.
    other = hg.peer(repo, {}, forkuri)
    # incrementally build the html
    html = templates["forkhead"].replace(
        "{forkname}", forkname).replace(
        "{reponame}", name).replace(
        "{forkuri}", safeuri(forkuri)).replace(
        "{relpath}", "../../")
    # prepare the log templater
    t = cmdutil.changeset_templater(ui, repo, patch=False, diffopts=None, mapfile=None, buffered=False)
    t.use_template(templates["commitlog"].replace(
        "{relativepath}", "../../"))
    # Add incoming commits
    # NOTE(review): the incoming <div> is never explicitly closed
    # before the outgoing <div> opens - confirm the templates balance
    # this out.
    html += "<div id='incoming'><h2>Incoming commits</h2>"
    chlist, cleanupfn, localother = getincoming(ui, repo, otheruri=forkuri, other=other, othername=forkname)
    # write all missing incoming commits directly from the incoming repo
    if chlist:
        try:
            writecommitsforchlist(ui, localother, target, name, chlist)
            writesourcetreeforchlist(ui, localother, target, name, chlist, force=False)
        except AttributeError:
            # peers without a changelog attribute cannot supply commit
            # data; anything else is a real error and is re-raised.
            if not hasattr(localother, "changelog"):
                print "Cannot write commits from fork", forkname, "because the repository type does not support getting the changelog."
            else:
                raise
    # capture the templated incoming log entries from the ui buffer
    ui.pushbuffer()
    for ch in chlist:
        ctx = localother.changectx(ch)
        t.show(ctx)
    html += ui.popbuffer()
    cleanupfn()
    # add outgoing commits
    html += "<div id='outgoing'><h2>Outgoing commits</h2>"
    chlist, cleanupfn, localother = getoutgoing(ui, repo, forkuri, other=other, othername=forkname)
    ui.pushbuffer()
    for ch in chlist:
        # outgoing changes live in the local repo, not in the peer
        ctx = repo.changectx(ch)
        t.show(ctx)
    html += ui.popbuffer()
    cleanupfn()
    html += "</div>"
    html += templates["foot"]
    return html
def getforkdir(target, forkname):
    """Site-relative directory of a fork's pages.

    target is accepted for symmetry with the other path helpers but is
    currently unused; fork pages always live under forks/<name>.
    """
    return os.path.join("forks", forkname)
def writeforks(ui, repo, target, name):
    """Write an info-page for each fork, defined in hg paths.

    Each page lists incoming and outgoing commits relative to this
    repo (this repo is treated as the main one, so "incoming" means
    changes the fork has that we lack).
    """
    for forkname, forkuri in getforkinfo(ui, target).items():
        # never treat the static site itself as a fork
        if os.path.abspath(forkuri) == os.path.abspath(target):
            continue
        forkdir = os.path.join(target, getforkdir(target, forkname))
        if not os.path.isdir(forkdir):
            os.makedirs(forkdir)
        indexpath = os.path.join(forkdir, "index.html")
        with open(indexpath, "w") as f:
            f.write(getforkdata(ui, repo, target, name, forkname, forkuri))
def writecommitsforchlist(ui, repo, target, name, chlist, force=False):
    """Write all not yet existing commit files.

    One html page per changeset under <target>/commit/<hex>.html,
    containing the templated log entry plus the full git-style diff.
    Pages that already exist are skipped unless force is set.
    """
    commit = os.path.join(target, "commit")
    # create the folders
    if not os.path.isdir(commit):
        os.makedirs(commit)
    t = cmdutil.changeset_templater(ui, repo, patch=False, diffopts=None, mapfile=None, buffered=False)
    t.use_template(templates["commitlog"].replace("{relativepath}", "../"))
    for c in chlist:
        ctx = repo.changectx(str(c))
        cpath = os.path.join(commit, ctx.hex() + ".html")
        if not force and os.path.isfile(cpath):
            continue
        with open(cpath, "w") as cf:
            cf.write(templates["head"].replace("{reponame}", "<a href='../'>"+name+"</a>").replace("{title}", name).replace("{nav}", "").replace("{relpath}", "../"))
            # capture the templated log entry from the ui buffer
            ui.pushbuffer()
            t.show(ctx)
            cf.write(ui.popbuffer())
            # capture the diff; escape "&" and "<" so raw diff text
            # cannot be interpreted as markup inside the <pre> block
            # (the previous replace("<", "<") was a no-op).
            ui.pushbuffer()
            commands.diff(ui, repo, change=str(c), git=True)
            cf.write("<pre>"+ui.popbuffer().replace("&", "&amp;").replace("<", "&lt;")+"</pre>")
            cf.write(templates["foot"].replace("{reponame}", "<a href='../'>"+name+"</a>"))
def writecommits(ui, repo, target, name, force=False):
    """Write a commit page for every changeset in the repository
    (thin wrapper around writecommitsforchlist over the full log)."""
    allchanges = range(len(repo.changelog))
    return writecommitsforchlist(ui, repo, target, name, allchanges,
                                 force=force)
#: html escape codes thanks to http://wiki.python.org/moin/EscapingHtml
# (the table previously mapped every character to itself, making
# htmlescape a no-op; restored the proper entity replacements)
htmlescapetable = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&#39;",
    ">": "&gt;",
    "<": "&lt;",
}
def htmlescape(text):
    """Produce entities within text."""
    return "".join(htmlescapetable.get(c, c) for c in text)
def writebugs(ui, repo, target, name):
    """Write bug information, a listing and the details for each bug."""
    bugdir = os.path.join(target, "bugs")
    # create the bugs folder
    if not os.path.isdir(bugdir):
        os.makedirs(bugdir)
    # get all bugs
    openbugs, resolvedbugs = getbugs(ui, repo)
    # write the bugs list
    bugslist = os.path.join(bugdir, "index.html")
    content = "<h2 id=\"open\">Open Bugs</h2>\n<ul>"
    for bug in openbugs:
        content += "<li><a href=\"" + bug.fullid + ".html\">" + bug.shortid + "</a> - " + htmlescape(bug.description) + "</li>\n"
    content += "</ul>\n"
    content += "<h2 id=\"resolved\">Resolved Bugs</h2>\n<ul>"
    for bug in resolvedbugs:
        content += "<li><a href=\"" + bug.fullid + ".html\">" + bug.shortid + "</a> - " + htmlescape(bug.description) + "</li>\n"
    content += "</ul>\n"
    with open(bugslist, "w") as f:
        f.write(templates["head"].replace("{reponame}", "<a href='../'>"+name+"</a>").replace("{title}", name).replace("{nav}", "").replace("{relpath}", "../"))
        f.write(content)
        f.write(templates["foot"].replace("{reponame}", "<a href='../'>"+name+"</a>"))
    # write all bug details
    for bug in openbugs + resolvedbugs:
        bugsfile = os.path.join(bugdir, bug.fullid + ".html")
        # NOTE(review): unlike the listing above, description and
        # details are inserted without htmlescape here - confirm
        # whether that is intended.
        body = "<h2>" + bug.description + "</h2>\n"
        body += "<pre>" + bug.details + "</pre>\n"
        body += "<hr>"
        body += "- <a href=\"index.html\">" + _("all bugs") + "</a> -"
        content = templates["head"].replace("{reponame}", "<a href='../'>"+name+"</a>").replace("{title}", name).replace("{nav}", "").replace("{relpath}", "../")
        content += body
        content += templates["foot"].replace("{reponame}", "<a href='../'>"+name+"</a>")
        # only rewrite the file when its content changed; the check is
        # expressed as an exception so that read errors (missing file,
        # unreadable file) also lead to a rewrite.
        try:
            if not contentequals(bugsfile, content):
                raise Exception("bugfile content does not match content to write. Needs overwriting.")
        except Exception as e: # generic exception: If anything went wrong, we need to write the file.
            print "Overwriting bugs file", bugsfile, "; Reason:", e
            with open(bugsfile, "w") as bf:
                bf.write(content)
def escapename(filename):
    """Escape generated-index collisions by prefixing a dot:
    index.html becomes .index.html, .index.html becomes ..index.html,
    and so forth (collision test is _indexregexp)."""
    if _indexregexp.match(filename) is None:
        return filename
    return "." + filename
def parsesrcdata(data):
    """Parse a src file into a html file.

    "&" and "<" are escaped so the source text cannot be interpreted
    as markup inside the <pre> block (the previous
    replace("<", "<") was a no-op).
    """
    return ("<pre class=\"prettyprint linenums\">"
            + data.replace("&", "&amp;").replace("<", "&lt;")
            + "</pre>")
def srcpath(target, ctx, filename):
    """Get the relative path to the static sourcefile for an already escaped filename."""
    revdir = os.path.join(target, "src", ctx.hex())
    return os.path.join(revdir, filename + ".html")
def rawpath(target, ctx, filename):
    """Get the relative path to the static raw file for an already escaped filename."""
    revdir = os.path.join(target, "raw", ctx.hex())
    return os.path.join(revdir, filename)
def ctxdiffstat(ui, repo, ctx):
    """Get the diffstat of a change context.

    Runs "hg log -r <rev> --stat" through dispatch (the only stable
    cli-level interface) and captures its output from the ui buffer.
    """
    if "color" in extensions.enabled():
        command = "log -r " + ctx.hex() + " --stat --color=never"
    else:
        command = "log -r " + ctx.hex() + " --stat"
    req = dispatch.request(command.split(), ui=ui, repo=repo)
    ui.pushbuffer()
    dispatch.dispatch(req)
    # FIXME: remove the color in an elegant way instead of fudging like this.
    # NOTE(review): these literals look like ANSI color sequences that
    # are missing their leading ESC (\x1b) byte - confirm against the
    # project history whether the escape character was lost.
    return ui.popbuffer().replace(
        "[0;33m","").replace(
        "[0;32m","").replace(
        "[0m", "").replace(
        "[0;31m", "").replace(
        "[0m","")
def createindex(ui, repo, target, ctx):
    """Create an index page for the changecontext: the commit message +
    the user + all files in the changecontext."""
    # first the head
    index = templates["manifesthead"].replace(
        "{hex}", ctx.hex()).replace(
        "{desc}", ctx.description()).replace(
        "{user}", ctx.user()).replace(
        "{diffstat}", ctxdiffstat(ui, repo, ctx))
    # then the files
    index += "<ul>"
    for filename in ctx:
        filectx = ctx[filename]
        # link each file at the revision where it was last changed -
        # that is where writesourcetreeforchlist put its html page.
        lasteditctx = filectx.filectx(filectx.filerev())
        # the closing </li> used to be swallowed by the trailing
        # comment that disabled the raw-file link; restored it.
        index += ("<li><a href='../../"
                  + os.path.join("src", lasteditctx.hex(), escapename(filename) + ".html")
                  + "'>" + filename + "</a></li>")
        # raw link disabled: " (<a href='../../" + os.path.join("raw", lasteditctx.hex(), filename) + "'>raw</a>)"
    index += "</ul>"
    return index
def writesourcetreeforchlist(ui, repo, target, name, chlist, force=False, rawfiles=False):
"""Write manifests for all commits and websites for all files.
* For each file, write sites for all revisions where the file was changed: under src/<hex>/path as html site (with linenumbers and maybe colored source), under raw/<hex>/<path> as plain files. If there is an index.html file, write it as .index.html. If there also is .index.html, turn it to ..index.html, …
* For each commit write an index with links to the included files at their latest revisions before/at the commit.
"""
# first write all files in all commits.
for c in chlist:
ctx = repo.changectx(str(c))
for filename in ctx.files():
try:
filectx = ctx.filectx(filename)
except LookupError, e:
ui.warn("File not found, likely moved ", e, "\n")
if rawfiles:
# first write the raw data
filepath = rawpath(target,ctx,filectx.path())
# skip already existing files
if not force and os.path.isfile(filepath):
continue
try:
os.makedirs(os.path.dirname(filepath))
except OSError: pass # exists
with open(filepath, "w") as f:
f.write(filectx.data())
# then write it as html
_filenameescaped = escapename(filectx.path())
numberofslashes = len([i for i in _filenameescaped if i == "/"])
filepath = srcpath(target,ctx,_filenameescaped)
if not force and os.path.isfile(filepath):
continue
try:
os.makedirs(os.path.dirname(filepath))
except OSError: pass # exists
with open(filepath, "w") as f:
f.write(templates["srchead"].replace("{filetitle}", name+": " + filename).replace("href=\"../../", "href=\"../../" + "../"*numberofslashes))
f.write(parsesrcdata(filectx.data()))
f.write(templates["foot"].replace("{reponame}", name))
# then write manifests for all commits
for c in chlist:
ctx = repo.changectx(str(c))
filepath = os.path.join(target,"src",ctx.hex(),"index.html")
# skip already existing files
if not force and os.path.isfile(filepath):
continue
try:
os.makedirs(os.path.dirname(filepath))
except OSError: pass # exists
content = templates["head"].replace("{reponame}", "<a href='../../'>"+name+"</a>").replace("{title}", name).replace("{nav}", "").replace("{relpath}", "../../")
content += createindex(ui, repo, target, ctx)
content += templates["foot"].replace("{reponame}", "<a href='../../'>"+name+"</a>")
with open(filepath, "w") as f:
f.write(content)
def writesourcetree(ui, repo, target, name, force, rawfiles=False):
    """Render the whole history: one html page per changed file
    revision plus a manifest page per commit (thin wrapper around
    writesourcetreeforchlist over the full changelog)."""
    every = range(len(repo.changelog))
    return writesourcetreeforchlist(ui, repo, target, name, every,
                                    force=force, rawfiles=rawfiles)
def parsesite(ui, repo, target, **opts):
    """Create the static folder.

    Renders every part of the site into target: stylesheets, prettify
    assets, overview, log, commit pages, source tree, fork pages and
    bugs. Refuses to reuse a foreign folder unless the user confirms;
    the marker file (_staticidentifier) tags a folder as ours.
    """
    idfile = os.path.join(target, _staticidentifier)
    if not os.path.isdir(target):
        # make sure the target exists
        os.makedirs(target)
    else: # make sure it is a staticrepo
        if not os.path.isfile(idfile):
            if not ui.prompt("The target folder " + target + " has not yet been used as static repo. Really use it? (y/N)", default="n").lower() in ["y", "yes"]:
                return
    # (re)write the marker so the folder is recognized next time
    with open(idfile, "w") as i:
        i.write("")
    # site title: explicit option > non-default folder name > repo name
    if opts["sitename"]:
        name = opts["sitename"]
    elif target != "static": name = target
    else: name = os.path.basename(repo.root)
    # first the stylesheets
    screenstyle = opts["screenstyle"]
    screenfile = os.path.join(target, "style.css")
    if screenstyle and not samefilecontent(screenstyle, screenfile):
        shutil.copyfile(screenstyle, screenfile)
    elif not contentequals(screenfile,templates["screenstyle"]):
        # NOTE(review): when a custom screenstyle is already in place
        # (first condition false because the files match), this branch
        # compares against the builtin template and may overwrite the
        # custom stylesheet with the default - confirm intended.
        with open(screenfile, "w") as f:
            f.write(templates["screenstyle"])
    printstyle = opts["printstyle"]
    printfile = os.path.join(target, "print.css")
    if printstyle and not samefilecontent(printstyle, printfile):
        shutil.copyfile(printstyle, printfile)
    elif not contentequals(printfile, templates["printstyle"]):
        with open(printfile, "w") as f:
            f.write(templates["printstyle"])
    # add google code prettify for source code highlighting
    for pretty in ["prettify.js", "prettify.css"]:
        highlightingsrc = os.path.join(os.path.dirname(__file__), pretty)
        highlightingfile = os.path.join(target, pretty)
        if not os.path.isfile(highlightingfile) or not samefilecontent(highlightingsrc, highlightingfile):
            shutil.copyfile(highlightingsrc, highlightingfile)
    ui.debug("[staticsite] writeoverview\n")
    # then the overview
    writeoverview(ui, repo, target, name)
    ui.debug("[staticsite] writelog\n")
    # and the log
    writelog(ui, repo, target, name)
    ui.debug("[staticsite] writecommits\n")
    # and all commit files
    writecommits(ui, repo, target, name, force=opts["force"])
    ui.debug("[staticsite] writesourcetree\n")
    # and all file data
    writesourcetree(ui, repo, target, name, force=opts["force"])
    ui.debug("[staticsite] writeforks\n")
    # and all forks
    writeforks(ui, repo, target, name)
    ui.debug("[staticsite] writebugs\n")
    # and all bugs
    writebugs(ui, repo, target, name)
def addrepo(ui, repo, target, bookmarks, force):
    """Add the repo to the target and make sure it is up to date.

    Initializes a repository inside target (if necessary) and pushes
    the local history - optionally including all bookmarks - into it.
    """
    try:
        commands.init(ui, dest=target)
    except mercurial.error.RepoError:
        pass # target is already a repository
    ui.pushbuffer() # hide the push chatter
    if bookmarks:
        commands.push(ui, repo, dest=target,
                      bookmark=repo._bookmarks, force=force)
    else:
        commands.push(ui, repo, dest=target, force=force)
    ui.popbuffer()
def upload(ui, repo, target, ftpstring, force):
    """upload the repo to the FTP server identified by the ftp string.

    ftpstring format: [ftp://]user:password@host/path/to/dir - without
    credentials an anonymous login is attempted. Files are uploaded
    when missing on the server, when force is set, or (if the server
    supports MDTM) when the local file is newer.
    """
    if ftpstring.startswith("ftp://"):
        ftpstring = ftpstring[len("ftp://"):]
    try:
        user, password = ftpstring.split("@")[0].split(":")
        serverandpath = "@".join(ftpstring.split("@")[1:])
    except ValueError:
        ui.warn(_("FTP-upload: No @ in FTP-Url. We try anonymous access.\n"))
        user, password = "anonymous", ""
        serverandpath = ftpstring # no @, so we just take the whole string
    server = serverandpath.split("/")[0]
    ftppath = "/".join(serverandpath.split("/")[1:])
    timeout = 10
    try:
        # FTP over TLS with a protected data channel (prot_p)
        ftp = ftplib.FTP_TLS(server, user, password, "", timeout=timeout)
        ftp.prot_p()
    except socket.timeout:
        # NOTE(review): timeout is an int passed straight to ui.warn -
        # confirm ui.warn accepts non-string arguments here.
        ui.warn(_("connection to "), server, _(" timed out after "), timeout, _(" seconds.\n"))
        return
    ui.status(ftp.getwelcome(), "\n")
    # create the target dir.
    serverdir = os.path.dirname(ftppath)
    serverdirparts = ftppath.split("/")
    sd = serverdirparts[0]
    if not sd in ftp.nlst():
        ftp.mkd(sd)
    # create every intermediate directory of ftppath, checking each
    # level against a server listing of its parent
    # NOTE(review): remote paths are joined with os.path.join - on
    # windows that yields backslashes; confirm this only runs on posix.
    for sdp in serverdirparts[1:]:
        sdo = sd
        sd = os.path.join(sd, sdp)
        if not sd in ftp.nlst(sdo):
            ftp.mkd(sd)
    ftp.cwd(ftppath)
    if not ftp.pwd() == "/" + ftppath:
        ui.warn(_("not in the correct ftp directory. Cowardly bailing out.\n"))
        return
    #ftp.dir()
    #return
    # MDTM (file modification time) support decides whether we can
    # skip files that are already up to date on the server
    ftpfeatures = ftp.sendcmd("FEAT")
    featuremtime = " MDTM" in ftpfeatures.splitlines()
    # cache of names known to exist on the server, to avoid repeating
    # nlst roundtrips for every single file
    _ftplistcache = set()
    for d, dirnames, filenames in os.walk(target):
        for filename in filenames:
            localfile = os.path.join(d, filename)
            # path of the file relative to target = path on the server
            serverfile = localfile[len(target)+1:]
            serverdir = os.path.dirname(serverfile)
            serverdirparts = serverdir.split("/")
            # print serverdirparts, serverfile
            with open(localfile, "rb") as f:
                # ensure the top-level directory exists (refresh the
                # cache from the server at most once, then create)
                sd = serverdirparts[0]
                if sd and not sd in _ftplistcache: # should happen only once per superdir
                    _ftplistcache.update(set(ftp.nlst()))
                if sd and not sd in _ftplistcache:
                    try:
                        ui.status(_("creating directory "), sd, "\n")
                        ftp.mkd(sd)
                        _ftplistcache.add(sd)
                    except ftplib.error_perm, resp:
                        ui.warn(_("could not create directory "), sd, ": " , resp, "\n")
                else: _ftplistcache.add(sd)
                # same for every deeper directory level
                for sdp in serverdirparts[1:]:
                    sdold = sd
                    sd = os.path.join(sd, sdp)
                    #print sd, sdp
                    #print ftp.nlst(sdold)
                    if sd and not sd in _ftplistcache: # should happen only once per superdir
                        _ftplistcache.update(set(ftp.nlst(sdold)))
                    if sd and not sd in _ftplistcache:
                        try:
                            ui.status(_("creating directory "), sd, "\n")
                            ftp.mkd(sd)
                            _ftplistcache.add(sd)
                        except ftplib.error_perm, resp:
                            ui.warn(_("could not create directory "),
                                    sd, ": " , resp, "\n")
                if not serverfile in _ftplistcache: # should happen for existing files only once per dir.
                    _ftplistcache.update(set(ftp.nlst(serverdir)))
                if not serverfile in _ftplistcache or force:
                    if force:
                        ui.status(_("uploading "), serverfile,
                                  _(" because I am forced to.\n"))
                    else:
                        ui.status(_("uploading "), serverfile,
                                  _(" because it is not yet online.\n"))
                    ftp.storbinary("STOR "+ serverfile, f)
                else:
                    # reupload the file if the file on the server is older than the local file.
                    if featuremtime:
                        ftpmtime = ftp.sendcmd("MDTM " + serverfile).split()[1]
                        localmtime = os.stat(localfile).st_mtime
                        # compare in the MDTM timestamp format YYYYMMDDHHMMSS
                        localmtimestr = datetime.datetime.utcfromtimestamp(localmtime).strftime("%Y%m%d%H%M%S")
                        newer = int(localmtimestr) > int(ftpmtime)
                        if newer:
                            ui.status(_("uploading "), serverfile,
                                      _(" because it is newer than the file on the FTP server.\n"))
                            ftp.storbinary("STOR "+ serverfile, f)
def staticsite(ui, repo, target=None, **opts):
    """Create a static copy of the repository and/or upload it to an FTP server."""
    if repo.root == target:
        ui.warn(_("static target repo can’t be the current repo"))
        return
    if not target:
        target = "static"
    # push the history itself into the static folder first, so the
    # rendered pages describe a repo that is actually there.
    # currently we need to either include all bookmarks or not, because
    # we don’t have the remote repo when parsing the site.
    # TODO: I don’t know if that is the correct way to go. Maybe always push all.
    addrepo(ui, repo, target, opts["bookmark"], force=opts["force"])
    # then render the site itself.
    parsesite(ui, repo, target, **opts)
    if opts["upload"]:
        # finally push the rendered folder to the FTP server
        upload(ui, repo, target, opts["upload"], opts["force"])
# command table picked up by mercurial when the extension is loaded
cmdtable = {
    # "command-name": (function-call, options-list, help-string)
    "site": (staticsite,
             [
                 #('r', 'rev', None, 'parse the given revision'),
                 #('a', 'all', None, 'parse all revisions (requires much space)'),
                 ('n', 'sitename', "", 'the repo name. Default: folder or last segment of the repo-path.'),
                 ('u', 'upload', "", 'upload the repo to the given ftp host. Format: user:password@host/path/to/dir'),
                 ('f', 'force', False, 'force recreating all commit files. Slow.'),
                 ('s', 'screenstyle', "", 'use a custom stylesheet for display on screen'),
                 ('p', 'printstyle', "", 'use a custom stylesheet for printing'),
                 ('B', 'bookmark', False, 'include the bookmarks')],
             "[options] [folder]")
}
## add ftp as scheme to be handled by this plugin.
wrapcmds = { # cmd: generic, target, fixdoc, ppopts, opts
    'push': (False, None, False, False, [
        # NOTE(review): this help text looks copy-pasted from another
        # extension (svn) - confirm the intended wording.
        ('', 'staticsite', None, 'show parent svn revision instead'),
    ])
}
## Explicitely wrap functions to change local commands in case the remote repo is an FTP repo. See mercurial.extensions for more information.
# Get the module which holds the functions to wrap
# the new function: gets the original function as first argument and the originals args and kwds.
def findcommonoutgoing(orig, *args, **opts):
    """Wrapper for discovery.findcommonoutgoing: FTP peers cannot be
    asked for their heads, so report nothing outgoing for them and
    defer to the original implementation for every other peer."""
    remote = args[1]
    capable = getattr(remote, 'capable', lambda x: False)
    if not capable('ftp'):
        return orig(*args, **opts)
    class fakeoutgoing(object):
        """Minimal stand-in for discovery's outgoing object with
        empty change lists."""
        def __init__(self):
            self.excluded = []
            self.missing = []
            self.commonheads = []
    return fakeoutgoing()
# really wrap the functions
# (monkey-patch discovery so every caller sees the FTP-aware version)
extensions.wrapfunction(discovery, 'findcommonoutgoing', findcommonoutgoing)
# explicitely wrap commands to change local commands in case the remote repo is an FTP repo.
def ftppush(orig, *args, **opts):
    """Wrapper for the push command: pushing to an ftp:// or ftps://
    path builds the static site locally and uploads it via FTP;
    everything else falls through to the original push."""
    try:
        ui, repo, path = args
        path = ui.expandpath(path)
    except ValueError: # no ftp string
        ui, repo = args
        path = ui.expandpath('default-push', 'default')
    # only act differently, if the target is an FTP repo.
    if not path.startswith("ftp"):
        return orig(*args, **opts)
    # first create the site at ._site
    target = "._site"
    # strip either scheme prefix; the former second assignment
    # recomputed from the unmodified path, discarding the first strip.
    ftpstring = path.replace("ftp://", "").replace("ftps://", "")
    # fix the options to fit those of the site command
    opts["name"] = opts["sitename"]
    opts["upload"] = ftpstring
    staticsite(ui, repo, target, **opts)
    return 0
# really wrap the command
# options forwarded from "hg push" to the site machinery
siteopts = [('', 'sitename', "", 'staticsite: the title of the site. Default: folder or last segment of the repo-path.'),
            ('', 'screenstyle', "", 'use a custom stylesheet for display on screen'),
            ('', 'printstyle', "", 'use a custom stylesheet for printing')]
# register ftppush as wrapper around "hg push" and extend the push
# option table so the site-specific flags are accepted there too
entry = extensions.wrapcommand(commands.table, "push", ftppush)
entry[1].extend(siteopts)
# Starting an FTP repo. Not yet used, except for throwing errors for missing commands and faking the lock.
# TODO: repo -> peer
from mercurial import util
try:
from mercurial.peer import peerrepository
except ImportError:
from mercurial.repo import repository as peerrepository
try:
from mercurial.error import RepoError
except ImportError:
from mercurial.repo import RepoError
# TODO: repo -> peer
class FTPRepository(peerrepository):
    """Read-only placeholder peer for ftp:// and ftps:// paths.

    Exists so that mercurial accepts FTP paths at all: every real
    history operation aborts, while the capabilities advertise 'ftp'
    so the wrapped commands (push, findcommonoutgoing) can divert to
    the static-site upload instead.
    """
    def __init__(self, ui, path, create):
        self.create = create
        self.ui = ui
        self.path = path
        self.capabilities = set(["ftp", "ftps"])
    def lock(self):
        """We cannot really lock FTP repos, yet.
        TODO: Implement as locking the repo in the static site folder."""
        class DummyLock:
            def release(self):
                pass
        l = DummyLock()
        return l
    def url(self):
        return self.path
    def lookup(self, key):
        return key
    def cancopy(self):
        return False
    def heads(self, *args, **opts):
        """
        Whenever this function is hit, we abort. The traceback is useful for
        figuring out where to intercept the functionality.
        """
        raise util.Abort('command heads unavailable for FTP repositories')
    def pushkey(self, namespace, key, old, new):
        return False
    def listkeys(self, namespace):
        return {}
    def push(self, remote, force=False, revs=None, newbranch=None):
        raise util.Abort('command push unavailable for FTP repositories')
    def pull(self, remote, heads=None, force=False):
        # heads used to default to a shared mutable [] - use None to
        # avoid the mutable-default pitfall (the method aborts anyway,
        # so behavior for callers is unchanged).
        raise util.Abort('command pull unavailable for FTP repositories')
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        raise util.Abort('command findoutgoing unavailable for FTP repositories')
class RepoContainer(object):
    """Factory registered in hg.schemes which hands out FTPRepository
    peers for ftp:// and ftps:// urls."""
    def __repr__(self):
        return '<FTPRepository>'
    def instance(self, ui, url, create):
        """Build the FTP peer for url.
        Should this use urlmod.url(), or is manual parsing better?"""
        return FTPRepository(ui, url, create)
# register the factory so hg resolves ftp:// and ftps:// paths to
# FTPRepository peers
hg.schemes["ftp"] = RepoContainer()
hg.schemes["ftps"] = RepoContainer()
def test():
    """Smoke-test the extension by invoking hg with it enabled."""
    import subprocess as sp
    def showcall(args):
        # echo the command line before running it (like "set -x")
        print args
        sp.call(args)
    # run from the extension's own directory so relative paths work
    os.chdir(os.path.dirname(__file__))
    # just check if loading the extension works
    showcall(["hg", "--config", "extensions.site="+__file__])
    # check if I can create a site
    showcall(["hg", "--config", "extensions.site="+__file__, "site", "-f", "-B", "-n", "mysite"])
    # check if uploading works: Only a valid test, if you have a
    # post-push hook which does the uploading
    # showcall(["hg", "--config", "extensions.site="+__file__, "push"])
    # check if push directly to ftp works. Requires the path draketo
    # to be set up in .hg/hgrc as ftp://user:password/path
    # showcall(["hg", "--config", "extensions.site="+__file__, "push", "draketo", "--sitename", "hg site extension"])
# running this file directly triggers the smoke tests above
if __name__ == "__main__":
    test()