update.sh: cleanup the cleanup code a bit
[girocco.git] / jobd / update.sh
blob e2d1dcea81fe2dffb2882cdba9d434b2ab61dba2

#!/bin/sh

. @basedir@/shlib.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
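# For example, "date "$datefmt"" should print something like
#   Mon, 01 Jan 2024 12:34:56 +0000
# i.e. the same RFC 2822 style that GNU "date -R" produces.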

git_fetch_q_progress() {
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}
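# (The GIT_BIN override above should make git-fetch-q-progress.sh run git via
# the ulimit512 wrapper, so fetched files are presumably capped at
# $cfg_max_file_size512 512-byte blocks, the unit "ulimit -f" uses.)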

# freshen_loose_objects full-sha ...
# for each full-sha argument that refers to a loose object, set its
# modification time to now; otherwise silently do nothing with no error.
# To facilitate conversion of mirror projects to push projects we also add
# group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
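# For illustration: a full sha argument such as
#   0123456789abcdef0123456789abcdef01234567
# becomes the loose object path
#   objects/01/23456789abcdef0123456789abcdef01234567
# (two-hex-digit fan-out directory plus remainder); "touch -c" then only
# freshens objects that actually exist as loose files.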

# darcs fast-export | git fast-import with error handling
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
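# A note on the redirection dance above (the same pattern appears in
# git_bzr_fetch below): "exec 3>&1" saves the real stdout on fd 3, and inside
# the $(...) the "exec 4>&3 3>&1 1>&4 4>&-" line points fd 3 at the command
# substitution's capture while restoring stdout to the real one. The
# pipeline's normal output therefore still reaches the caller, but the two
# "echo $_eN >&3" lines are captured into the here-document and read back as
# _err1/_err2, so the function can fail if either stage of the pipe failed.
# A rough standalone sketch of the idea (stage1/stage2 are placeholders):
#	exec 3>&1
#	{ read -r e1 || :; read -r e2 || :; } <<-EOT
#	$(
#		exec 4>&3 3>&1 1>&4 4>&-
#		{ stage1; echo $? >&3; } | { stage2; echo $? >&3; }
#	)
#	EOT
#	exec 3>&-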

# bzr fast-export | git fast-import with error handling
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }

umask 002
[ "$cfg_permission_control" != "Hooks" ] || umask 000
clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invocation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build-up.
check_and_set_needsgc

bang_log=
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$bang_log" ] || rm -f "$bang_log"
}
trap 'cleanup_exit' EXIT
trap 'exit 129' HUP
trap 'exit 130' INT
trap 'exit 131' QUIT
trap 'exit 134' ABRT
trap 'exit 141' PIPE
trap 'exit 142' ALRM
trap 'exit 143' TERM
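# (The values above follow the common 128+signal-number convention, e.g. 130
# for SIGINT and 143 for SIGTERM; exiting explicitly means the EXIT trap still
# runs cleanup_exit and removes any leftover bang log.)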

if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"
	exit 0
fi
if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"
	exit 0
fi
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# Remove any stale ref locks
clear_stale_ref_locks

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted; we do not even
# need to check how old they are. A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}

bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
bang echo "Mirroring from URL \"$url\""
bang echo ""
statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
mailaddrs=
[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
	if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}
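# In other words, .refs-before and .refs-after are sorted "refname objectname"
# snapshots (one line per ref, e.g. "refs/heads/master <sha1>") taken before
# and after the fetch; refschanged is set if cmp sees any difference, and the
# sort order is what lets the join-based reflog generation further down pair
# the two snapshots up by ref name.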

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi

case "$url" in
svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
	[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
	# Allow the username to be specified in the "svn-credential.svn.username"
	# property and the password in the "svn-credential.svn.password" property
	# Use an 'anonsvn' username by default as is commonly used for anonymous svn
	# Default the password to the same as the username
	# The password property will be ignored unless a username has been specified
	if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
		if ! svnpass="$(git config --get svn-credential.svn.password)"; then
			svnpass="$svnuser"
		fi
		url1="${url#*://}"
		url1="${url1%%/*}"
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
		esac
	else
		# As a fallback, check in the URL, just in case
		url1="${url#*://}"
		url1="${url1%%/*}"
		svnuser=
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
			url1="${url1%%@*}"
			svnuser="${url1%%:*}"
			if [ -n "$svnuser" ]; then
				svnpass="$svnuser"
				case "$url1" in *":"*)
					svnpass="${url1#*:}"
				esac
			fi
		esac
		if [ -z "$svnuser" ]; then
			svnuser="anonsvn"
			svnpass="anonsvn"
		fi
	fi
	GIT_ASKPASS_PASSWORD="$svnpass"
	export GIT_ASKPASS_PASSWORD
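	# Worked example (illustrative values): with a baseurl of
	#   svn+https://alice:s3cret@svn.example.com/repo
	# and no svn-credential.svn.* settings, the fallback branch above should
	# end up with svnuser=alice, svnpass=s3cret and
	# url=svn+https://svn.example.com/repo; with no user@ part at all it falls
	# back to anonsvn/anonsvn. The exported GIT_ASKPASS_PASSWORD is presumably
	# what the configured askpass helper hands back when a password is prompted for.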
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes. See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We'd better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# The only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL, otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL. However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
		if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
			# The url has been changed.
			# We must update the url and replace the prefix on all config items
			gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
			gitsvnprefixold="${gitsvnfetch%%:*}"
			gitsvnsuffixold="${gitsvnprefixold##*/}"
			gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
			git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
			git config 'svn-remote.svnnew.url' "$gitsvnurl"
			git config --get-regexp '^svn-remote\.svn\.' |
			while read -r sname sval; do
				case "$sname" in
				svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
					sname="${sname#svn-remote.svn.}"
					sval="${sval#$gitsvnprefixold}"
					bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
				esac
			done
			test $? -eq 0
			bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
			bang git config --remove-section svn-remote.svn
			bang git config --rename-section svn-remote.svnnew svn-remote.svn
		fi
		bang config_set svnurl "$svnurl"
	fi
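	# For illustration: if the fresh "git svn init -s" produced
	#   svn-remote.svn.fetch = project/trunk:refs/remotes/trunk
	# the parsing above should give gitsvnsuffixnew=trunk and
	# gitsvnprefixnew=project/, and the loop rewrites each existing
	# fetch/branches/tags value from the old prefix to the new one before
	# svn-remote.svnnew is renamed back over svn-remote.svn.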

	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
	export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	saveconfig="$GIT_CONFIG_PARAMETERS"
	git_add_config 'gc.auto=1'
	git_add_config 'gc.autoPackLimit=1'
	GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
	GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
	export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIT_CONFIG_PARAMETERS
	[ -z "$saveconfig" ] || {
		GIT_CONFIG_PARAMETERS="$saveconfig"
		export GIT_CONFIG_PARAMETERS
	}
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
	unset GIT_ASKPASS_PASSWORD
	;;
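	# For example, a leftover ref like refs/remotes/trunk@1234 matches the sed
	# expression above and becomes a "delete refs/remotes/trunk@1234" line for
	# git_updateref_stdin (presumably a git update-ref --stdin wrapper from
	# shlib.sh); refs without an @nnn suffix are filtered out by the "!d" and
	# left alone.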
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
386 [ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
387 pruneopt=--prune
388 [ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
389 if ! is_gfi_mirror_url "$url"; then
390 lastwasclean=
391 [ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
392 nextisclean=
393 [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
394 if [ "$nextisclean" != "$lastwasclean" ]; then
395 if [ -n "$nextisclean" ]; then
396 git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
397 git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
398 git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
399 git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
400 else
401 git config --replace-all remote.origin.fetch "+refs/*:refs/*"
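	# Put differently: a "clean" mirror restricts remote.origin.fetch to the
	# usual namespaces, roughly
	#   +refs/heads/*:refs/heads/*
	#   +refs/tags/*:refs/tags/*
	#   +refs/notes/*:refs/notes/*
	#   +refs/top-bases/*:refs/top-bases/*
	# while a non-clean mirror copies everything under refs/ verbatim, and the
	# refspecs are only rewritten when girocco.cleanmirror has changed since
	# the last update (the lastwasclean/nextisclean comparison above).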

	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
		rm -f .gfipack
		>.gfipack
		sleep 1
	fi
	fetcharg="default"
	git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
	fetchcmd="git_ulimit fetch"
	[ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
	if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
		# git fetch learned --progress in v1.7.1
		case "$show_progress" in
		[2-9]*|1[0-9]*)
			# full volume progress with all the spammy noise
			fetchcmd="git_ulimit fetch --progress"
			;;
		*)
			# a kinder, gentler progress that doesn't leave one
			# covered all over in exploded bits of spam afterwards
			fetchcmd="git_fetch_q_progress"
			;;
		esac
	fi

	# It's possible for a fetch to actually do something while still returning
	# a non-zero result (perhaps some of the refs were updated but some were
	# not -- a malicious Git-impersonation trying to set refs/heads/... refs
	# to non-commit objects for example).
	GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
	# If we did fetch anything, don't treat it as an error, but do keep the log;
	# otherwise invoke bang_failed as for a normal failure
	if [ "${bang_errcode:-0}" != "0" ]; then
		save_bang_errcode="$bang_errcode"
		check_after_refs
		if [ -n "$refschanged" ]; then
			keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		else
			bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
			bang_errcode="$save_bang_errcode"
			bang_failed
		fi
	fi
	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac

# The permissions on the objects subdirectories must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
check_after_refs

# Update server info if any refs changed (if they didn't, packs shouldn't have either)
[ -z "$refschanged" ] || bang git update-server-info

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often, therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore it and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
   [ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	{
		echo "ref-changes %$proj% $proj"
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
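	# To spell out the three joins above: the plain join lists refs present in
	# both snapshots (the sed drops those whose object id is unchanged),
	# "join -v 1" lists refs that disappeared and "join -v 2" lists refs that
	# are new, with the all-zero id standing in for the missing side. Every
	# record is written both to the reflogs/<YYYYMMDD> file on fd 3 (prefixed
	# with the HHMMSS timestamp) and to .refs-temp, which feeds the taskd
	# socket notification just below.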
	if [ -S "$sockpath" ]; then
		trap ':' PIPE
		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
		trap - PIPE
	fi
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc
	if [ "${cfg_autogchack:-0}" != "0" ] &&
	   [ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
	then
		mv -f .refs-after .refs-last
	fi
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes non-empty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi

if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
	cat "$bang_log" >.banglog
	echo "" >>.banglog
	echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
fi

progress "- [$proj] update ($(date))"