#!/bin/sh

. @basedir@/shlib.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
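# e.g. "Mon, 02 Jan 2006 15:04:05 -0700"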

git_fetch_q_progress() {
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}

# freshen_loose_objects full-sha ...
# if "$n" is a loose object, set its modification time to now
# otherwise silently do nothing with no error. To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
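	# each full sha was split into a fan-out directory plus filename,
	# e.g. d670460b4b4aece5915caf5c68d12f560a9fe3e4 -> objects/d6/70460b4b4aece5915caf5c68d12f560a9fe3e4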
	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}

# darcs fast-export | git fast-import with error handling
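# How the exit status capture below works: the whole export | import pipeline
# runs inside a command substitution in the here-document, with fd 1 pointed
# back at the script's stdout and fd 3 at the substitution's output, so the
# only data the two "read" commands see is the pair of exit codes echoed to
# fd 3 by the two sides of the pipeline.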
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

# bzr fast-export | git fast-import with error handling
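# (exit status capture works the same way as in git_darcs_fetch above)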
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }

umask 002
[ "$cfg_permission_control" != "Hooks" ] || umask 000
clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invocation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build up.
check_and_set_needsgc

trap 'if [ $? != 0 ]; then echo "update failed dir: $PWD" >&2; fi; rm -f "$bang_log"' EXIT
trap 'exit 130' INT
trap 'exit 143' TERM

if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"
	exit 0
fi
if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"
	exit 0
fi
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# Remove any stale ref locks
clear_stale_ref_locks

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted; we do not even
# need to check how old they are. A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}

bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
# A URL that is empty or contains whitespace (space or tab) is unusable
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
bang echo "Mirroring from URL \"$url\""
bang echo ""
statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
mailaddrs=
[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
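# also CC the site admin on status mails unless $cfg_admincc is unset or disabled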
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi

case "$url" in
svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
	[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
	# Allow the username to be specified in the "svn-credential.svn.username"
	# property and the password in the "svn-credential.svn.password" property
	# Use an 'anonsvn' username by default as is commonly used for anonymous svn
	# Default the password to the same as the username
	# The password property will be ignored unless a username has been specified
	if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
		if ! svnpass="$(git config --get svn-credential.svn.password)"; then
			svnpass="$svnuser"
		fi
		url1="${url#*://}"
		url1="${url1%%/*}"
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
		esac
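		# (any user@ in the URL itself was just stripped since the config-supplied
		#  credentials take precedence, e.g.
		#  svn+https://alice@svn.example.com/repo -> svn+https://svn.example.com/repo)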
	else
		# As a fallback, check in the URL, just in case
		url1="${url#*://}"
		url1="${url1%%/*}"
		svnuser=
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
			url1="${url1%%@*}"
			svnuser="${url1%%:*}"
			if [ -n "$svnuser" ]; then
				svnpass="$svnuser"
				case "$url1" in *":"*)
					svnpass="${url1#*:}"
				esac
			fi
		esac
		if [ -z "$svnuser" ]; then
			svnuser="anonsvn"
			svnpass="anonsvn"
		fi
	fi
	GIT_ASKPASS_PASSWORD="$svnpass"
	export GIT_ASKPASS_PASSWORD
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes. See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# the only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL. However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
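		# (e.g. a fetch line of "code/trunk:refs/remotes/trunk" yields the prefix "code/",
		#  while a plain "trunk:refs/remotes/trunk" yields an empty prefix)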
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
		if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
			# The url has been changed.
			# We must update the url and replace the prefix on all config items
			gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
			gitsvnprefixold="${gitsvnfetch%%:*}"
			gitsvnsuffixold="${gitsvnprefixold##*/}"
			gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
			git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
			git config 'svn-remote.svnnew.url' "$gitsvnurl"
			git config --get-regexp '^svn-remote\.svn\.' |
			while read -r sname sval; do
				case "$sname" in
				svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
					sname="${sname#svn-remote.svn.}"
					sval="${sval#$gitsvnprefixold}"
					bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
				esac
			done
			test $? -eq 0
			bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
			bang git config --remove-section svn-remote.svn
			bang git config --rename-section svn-remote.svnnew svn-remote.svn
		fi
		bang config_set svnurl "$svnurl"
	fi
	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
	export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	saveconfig="$GIT_CONFIG_PARAMETERS"
	git_add_config 'gc.auto=1'
	git_add_config 'gc.autoPackLimit=1'
	GIT_DIR=. bang git svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
	GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
	export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIT_CONFIG_PARAMETERS
	[ -z "$saveconfig" ] || {
		GIT_CONFIG_PARAMETERS="$saveconfig"
		export GIT_CONFIG_PARAMETERS
	}
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
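	# (i.e. any ref whose name ends in @ followed by a decimal revision number,
	#  e.g. refs/remotes/trunk@1234)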
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
	unset GIT_ASKPASS_PASSWORD
	;;
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
*)
	[ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
	pruneopt=--prune
	[ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
	if ! is_gfi_mirror_url "$url"; then
		lastwasclean=
		[ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
		nextisclean=
		[ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
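		# girocco.cleanmirror selects which refs get mirrored: a "clean" mirror
		# fetches only heads, tags, notes and top-bases, otherwise everything
		# under refs/ is mirrored verbatim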
		if [ "$nextisclean" != "$lastwasclean" ]; then
			if [ -n "$nextisclean" ]; then
				git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
				git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
				git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
				git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
			else
				git config --replace-all remote.origin.fetch "+refs/*:refs/*"
			fi
		fi
	fi
	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
		rm -f .gfipack
		>.gfipack
		sleep 1
	fi
	fetcharg="default"
	git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
	fetchcmd="git fetch"
	[ "$show_progress" != "0" ] || fetchcmd="git fetch -q"
	if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
		# git fetch learned --progress in v1.7.1
		case "$show_progress" in
		[2-9]*|1[0-9]*)
			# full volume progress with all the spammy noise
			fetchcmd="git fetch --progress"
			;;
		*)
			# a kinder, gentler progress that doesn't leave one
			# covered all over in exploded bits of spam afterwards
			fetchcmd="git_fetch_q_progress"
			;;
		esac
	fi
	# It's possible for a fetch to actually do something while still returning
	# a non-zero result (perhaps some of the refs were updated but some were
	# not -- a malicious Git-impersonation trying to set refs/heads/... refs
	# to non-commit objects for example).
	GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
	# If we did fetch anything, don't treat it as an error, but do keep the log;
	# otherwise invoke bang_failed as for a normal failure
	if [ "${bang_errcode:-0}" != "0" ]; then
		save_bang_errcode="$bang_errcode"
		check_after_refs
		if [ -n "$refschanged" ]; then
			keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		else
			bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
			bang_errcode="$save_bang_errcode"
			bang_failed
		fi
	fi
	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac

# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

bang git update-server-info

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
check_after_refs

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	{
		echo "ref-changes %$proj% $proj"
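		# refs present both before and after are joined on refname as
		# "refname oldsha newsha"; the sed drops those whose sha did not change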
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
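		# also report every current branch head (listed with old == new)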
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
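	# hand the batched ref-change report to taskd via its socket, if it is listening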
	if [ -S "$sockpath" ]; then
		trap ':' PIPE
		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
		trap - PIPE
	fi
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc
	if [ "${cfg_autogchack:-0}" != "0" ] &&
		[ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
	then
		mv -f .refs-after .refs-last
	fi
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi

if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
	cat "$bang_log" >.banglog
	echo "" >>.banglog
	echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
fi

progress "- [$proj] update ($(date))"