updategc-util-functions.sh: parameterize "still running" message
[girocco.git] / jobd / update.sh
blobea21e078db73e2eec608e5bf7c02956ba43682f4
1 #!/bin/sh
3 . @basedir@/shlib.sh
5 set -e
7 if [ $# -ne 1 ]; then
8 echo "Usage: update.sh projname" >&2
9 exit 1
12 # date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
13 datefmt='+%a, %d %b %Y %T %z'
# git_fetch_q_progress "$@"
# Run the jobd git-fetch-q-progress.sh helper with the project's git
# binary, optionally wrapped in ulimit512 so that any file Git writes
# during the fetch cannot exceed $cfg_max_file_size512 512-byte blocks.
git_fetch_q_progress() {
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		# Enforce the max file size limit on everything Git writes
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	# Prepend git's exec path and our bin dir so the helper finds the right tools
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}
23 # freshen_loose_objects full-sha ...
24 # if "$n" is a loose object, set its modification time to now
25 # otherwise silently do nothing with no error. To facilitate conversion
26 # of mirror projects to push projects we also add group write permission.
# freshen_loose_objects full-sha ...
# If "$n" is a loose object, set its modification time to now,
# otherwise silently do nothing with no error. To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		# Loose objects are sharded as objects/xx/rest where xx is the
		# first two hex digits of the SHA
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		# $_list is deliberately unquoted: it is a space-separated list
		# of paths built from hex SHAs, which never contain whitespace.
		# -c on touch and the 2>/dev/null||: guards make missing (i.e.
		# packed or pruned) objects a silent no-op.
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
40 # darcs fast-export | git fast-import with error handling
# darcs fast-export | git fast-import with error handling
# Runs in a subshell so locale/FD changes do not leak. Both pipeline
# stages write their exit codes to FD 3, which is captured via the
# command substitution inside the here-document and read back into
# _err1/_err2; the function succeeds only if both stages exited 0.
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		# Swap FDs so the exit-code echoes (FD 3) feed the here-document
		# while normal pipeline output still goes to the original stdout
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
72 # bzr fast-export | git fast-import with error handling
# bzr fast-export | git fast-import with error handling
# Same FD/here-document exit-code capture scheme as git_darcs_fetch:
# each stage echoes its exit status to FD 3 and the function succeeds
# only when both stages report 0. Runs in a subshell; BZR_LOG is sent
# to /dev/null to keep bzr from writing a log file.
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		# Swap FDs so the exit-code echoes (FD 3) feed the here-document
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
106 [ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }
108 umask 002
109 dperms=2775
110 [ "$cfg_permission_control" != "Hooks" ] || { umask 000; dperms=2777; }
111 clean_git_env
113 proj="${1%.git}"
114 cd "$cfg_reporoot/$proj.git"
116 # Activate a mini-gc if needed
117 # We do this here as well as after a successful fetch so that if we're stuck
118 # in a fetch loop where fetches are succeeding in fetching new packs but the
119 # ref update is failing for some reason (perhaps a non-commit under refs/heads)
120 # and a previous invokation therefore had a "bang" exit then we will still
121 # get the .needsgc flag set in a timely fashion to avoid excess pack build up.
122 check_and_set_needsgc
124 bang_log=
125 incoming_fetch=
126 incoming_objs=
# EXIT trap handler: on failure report the project directory, then in
# all cases remove the incoming-* quarantine directories and the
# transient bang log (variables are empty until those are created,
# so each removal is guarded).
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$incoming_fetch" ] || rm -rf "$incoming_fetch"
	[ -z "$incoming_objs" ] || rm -rf "$incoming_objs"
	[ -z "$bang_log" ] || rm -f "$bang_log"
}
136 trap 'cleanup_exit' EXIT
137 trap 'exit 129' HUP
138 trap 'exit 130' INT
139 trap 'exit 131' QUIT
140 trap 'exit 134' ABRT
141 trap 'exit 141' PIPE
142 trap 'exit 142' ALRM
143 trap 'exit 143' TERM
145 if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
146 progress "= [$proj] update skip (last at $(config_get lastrefresh))"
147 exit 0
149 if [ -e .nofetch ]; then
150 progress "x [$proj] update disabled (.nofetch exists)"
151 exit 0
153 progress "+ [$proj] update ($(date))"
155 # Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
156 rm -f FETCH_HEAD
158 # Remove any stale ref locks
159 clear_stale_ref_locks
161 # Remove any stale incoming-* object quarantine directories that are
162 # more than 12 hours old. These are new with Git >= 2.11.0.
163 # But we also create our own during the fetch process as Git's quarantine
164 # only applies to incoming receive-pack which we imitate for our fetch.
165 find -L . objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
166 -exec rm -rf '{}' + || :
168 # A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
169 # Since no pushes are allowed to mirrors, we know that any such files that exist
170 # at this point in time are garbage and can be safely deleted, we do not even
171 # need to check how old they are. A tmp_idx_XXXXXX file is also created during
172 # the later stages of the fetch process, so we kill any of those as well.
173 find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
174 find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :
176 # Make sure we have a reflogs subdirectory and abort the update if not
177 # This should not count as a normal "bang" failure if unsuccessful
178 [ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
179 [ -d reflogs ]
181 # Create a "quarantine" area to fetch into
182 # This is set up similarly to the way the "repack" directory is set
183 # up for gc in that it's a subdirectory that's a whole "git" directory
184 # but it uses the existing objects directory as an alternate and its
185 # own objects subdirectory is a symlink to a subdirectory of the real
186 # objects directory (to guarantee that packs/objects can be moved rather
187 # than copied). It starts out with a copy of all of the project's refs.
188 # A successful fetch will "unquarantine" fetched objects/packs + ref changes
189 incoming_objs="$(mktemp -d "$PWD/objects/incoming-XXXXXX")"
190 incoming_objs="$(cd "$incoming_objs" && pwd -P)"
191 chmod "$dperms" "$incoming_objs"
192 mkdir "$incoming_objs/pack"
193 mkdir "$incoming_objs/info"
194 printf '%s\n' "$PWD/objects" >"$incoming_objs/info/alternates"
195 incoming_fetch="$(mktemp -d "$PWD/incoming-XXXXXX")"
196 incoming_fetch="$(cd "$incoming_fetch" && pwd -P)"
197 chmod "$dperms" "$incoming_fetch"
198 ln -s "$incoming_objs" "$incoming_fetch/objects"
199 mkdir "$incoming_fetch/refs"
200 ln -s "$PWD/config" "$incoming_fetch/config"
201 git for-each-ref --format='%(objectname) %(refname)' >"$incoming_fetch/packed-refs"
202 cat HEAD >"$incoming_fetch/HEAD"
203 # Make sure the incoming packed-refs file is properly peeled
204 git --git-dir="$incoming_fetch" pack-refs --all --prune
205 # link to svn if it exists
206 [ ! -d svn ] || ln -s "$PWD/svn" "$incoming_fetch/svn"
208 keep_bang_log=
209 do_check_after_refs=1
210 bang_setup
211 bang_action="update"
# Invoked by the bang failure machinery; "$1" is non-empty on failure.
# On failure, advance gitweb.lastrefresh so retries are throttled.
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}
223 bang echo "Project: $proj"
224 bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
225 bang echo ""
226 mail="$(config_get owner)" || :
227 url="$(config_get baseurl)" || :
228 case "$url" in *" "*|*" "*|"")
229 bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
230 exit 1
231 esac
232 bang echo "Mirroring from URL \"$url\""
233 bang echo ""
234 statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
235 mailaddrs=
236 [ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
237 [ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
238 if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi
240 bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
241 bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"
# Snapshot the current refs into .refs-after (sorted the same way as
# .refs-before) and set refschanged=1 if the two differ. Runs at most
# once: do_check_after_refs is cleared so later calls are no-ops.
check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	# LC_ALL=C gives a stable byte-wise sort matching .refs-before
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}
252 ! [ -e .delaygc ] || >.allowgc || :
254 # Make sure we don't get any unwanted loose objects
255 # Starting with Git v2.10.0 fast-import can generate loose objects unless we
256 # tweak its configuration to prevent that
257 git_add_config 'fetch.unpackLimit=1'
258 # Note the git config documentation is wrong
259 # transfer.unpackLimit, if set, overrides fetch.unpackLimit
260 git_add_config 'transfer.unpackLimit=1'
261 # But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
262 git_add_config 'fastimport.unpackLimit=0'
264 # remember the starting time so we can easily combine fetched loose objects
265 # we sleep for 1 second after creating .needspack to make sure all objects are newer
266 if ! [ -e .needspack ]; then
267 rm -f .needspack
268 >.needspack
269 sleep 1
272 case "$url" in
273 svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
274 [ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
275 # Allow the username to be specified in the "svn-credential.svn.username"
276 # property and the password in the "svn-credential.svn.password" property
277 # Use an 'anonsvn' username by default as is commonly used for anonymous svn
278 # Default the password to the same as the username
279 # The password property will be ignored unless a username has been specified
280 if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
281 if ! svnpass="$(git config --get svn-credential.svn.password)"; then
282 svnpass="$svnuser"
284 url1="${url#*://}"
285 url1="${url1%%/*}"
286 case "$url1" in ?*"@"?*)
287 urlsch="${url%%://*}"
288 url="$urlsch://${url#*@}"
289 esac
290 else
291 # As a fallback, check in the URL, just in case
292 url1="${url#*://}"
293 url1="${url1%%/*}"
294 svnuser=
295 case "$url1" in ?*"@"?*)
296 urlsch="${url%%://*}"
297 url="$urlsch://${url#*@}"
298 url1="${url1%%@*}"
299 svnuser="${url1%%:*}"
300 if [ -n "$svnuser" ]; then
301 svnpass="$svnuser"
302 case "$url1" in *":"*)
303 svnpass="${url1#*:}"
304 esac
306 esac
307 if [ -z "$svnuser" ]; then
308 svnuser="anonsvn"
309 svnpass="anonsvn"
312 GIT_ASKPASS_PASSWORD="$svnpass"
313 export GIT_ASKPASS_PASSWORD
314 # Update the git svn url to match baseurl but be cognizant of any
315 # needed prefix changes. See the comments in taskd/clone.sh about
316 # why we need to put up with a prefix in the first place.
317 case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
318 svnurl="${svnurl%/}"
319 svnurlold="$(config_get svnurl)" || :
320 if [ "$svnurl" != "$svnurlold" ]; then
321 # We better already have an svn-remote.svn.fetch setting
322 bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
323 # the only way to truly know what the proper prefix is
324 # is to attempt a fresh git-svn init -s on the new url
325 rm -rf svn-new-url || :
326 # We require svn info to succeed on the URL otherwise it's
327 # simply not a valid URL and without using -s on the init it
328 # will not otherwise be tested until the fetch
329 bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
330 bang mkdir svn-new-url
331 GIT_DIR=svn-new-url bang git init --bare --quiet
332 # We initially use -s for the init which will possibly shorten
333 # the URL. However, the shortening can fail if a password is
334 # not required for the longer version but is for the shorter,
335 # so try again without -s if the -s version fails.
336 cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
337 cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
338 GIT_DIR=svn-new-url bang eval "$cmdstr"
339 gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
340 gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
341 gitsvnprefixnew="${gitsvnfetch%%:*}"
342 gitsvnsuffixnew="${gitsvnprefixnew##*/}"
343 gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
344 rm -rf svn-new-url || :
345 # Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
346 GIT_DIR=.
347 if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
348 # The url has been changed.
349 # We must update the url and replace the prefix on all config items
350 gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
351 gitsvnprefixold="${gitsvnfetch%%:*}"
352 gitsvnsuffixold="${gitsvnprefixold##*/}"
353 gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
354 git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
355 git config 'svn-remote.svnnew.url' "$gitsvnurl"
356 git config --get-regexp '^svn-remote\.svn\.' |
357 while read -r sname sval; do
358 case "$sname" in
359 svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
360 sname="${sname#svn-remote.svn.}"
361 sval="${sval#$gitsvnprefixold}"
362 bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
363 esac
364 done
365 test $? -eq 0
366 bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
367 bang git config --remove-section svn-remote.svn
368 bang git config --rename-section svn-remote.svnnew svn-remote.svn
370 bang config_set svnurl "$svnurl"
372 # remove any stale *.lock files greater than 1 hour old in case
373 # git-svn was killed on the last update because it took too long
374 find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
375 GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
376 export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
377 unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
378 saveconfig="$GIT_CONFIG_PARAMETERS"
379 git_add_config 'gc.auto=1'
380 git_add_config 'gc.autoPackLimit=1'
381 GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
382 GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
383 export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
384 unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
385 unset GIT_CONFIG_PARAMETERS
386 [ -z "$saveconfig" ] || {
387 GIT_CONFIG_PARAMETERS="$saveconfig"
388 export GIT_CONFIG_PARAMETERS
390 # git svn does not preserve group permissions in the svn subdirectory
391 chmod -R ug+rw,o+r svn
392 # git svn also leaves behind ref turds that end with @nnn
393 # We get rid of them now
394 git for-each-ref --format='%(refname)' |
395 LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
396 git_updateref_stdin
397 unset GIT_ASKPASS_PASSWORD
399 darcs://* | darcs+http://* | darcs+https://*)
400 [ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
401 case "$url" in
402 darcs://*) darcsurl="http://${url#darcs://}";;
403 *) darcsurl="${url#darcs+}";;
404 esac
405 # remove any stale lock files greater than 1 hour old in case
406 # darcs_fast_export was killed on the last update because it took too long
407 find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
408 bang git_darcs_fetch "$darcsurl"
410 bzr://*)
411 [ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
412 bzrurl="${url#bzr://}"
413 bang git_bzr_fetch "$bzrurl"
415 hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
416 [ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
417 # We just remove hg+ here, so hg+http://... becomes http://...
418 hgurl="${url#hg+}"
419 # Fetch any new updates
420 bang hg -R "$(pwd)/repo.hg" pull
421 # Do the fast-export | fast-import
422 bang git_hg_fetch
425 [ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
426 pruneopt=--prune
427 [ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
428 if ! is_gfi_mirror_url "$url"; then
429 lastwasclean=
430 [ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
431 nextisclean=
432 [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
433 if [ "$nextisclean" != "$lastwasclean" ]; then
434 if [ -n "$nextisclean" ]; then
435 git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
436 git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
437 git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
438 git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
439 else
440 git config --replace-all remote.origin.fetch "+refs/*:refs/*"
444 # remember the starting time so we can easily detect new packs for fast-import mirrors
445 # we sleep for 1 second after creating .gfipack to make sure all packs are newer
446 if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
447 rm -f .gfipack
448 >.gfipack
449 sleep 1
451 fetcharg="default"
452 git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
453 fetchcmd="git_ulimit fetch"
454 [ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
455 if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
456 # git fetch learned --progress in v1.7.1
457 case "$show_progress" in
458 [2-9]*|1[0-9]*)
459 # full volume progress with all the spammy noise
460 fetchcmd="git_ulimit fetch --progress"
463 # a kinder, gentler progress that doesn't leave one
464 # covered all over in exploded bits of spam afterwards
465 fetchcmd="git_fetch_q_progress"
467 esac
469 # It's possible for a fetch to actually do something while still returning
470 # a non-zero result (perhaps some of the refs were updated but some were
471 # not -- a malicious Git-impersonation trying to set refs/heads/... refs
472 # to non-commit objects for example).
473 GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
474 # If we did fetch anything, don't treat it as an error, but do keep the log;
475 # otherwise invoke bang_failed as for a normal failure
476 if [ "${bang_errcode:-0}" != "0" ]; then
477 save_bang_errcode="$bang_errcode"
478 check_after_refs
479 if [ -n "$refschanged" ]; then
480 keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
481 else
482 bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
483 bang_errcode="$save_bang_errcode"
484 bang_failed
487 if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
488 if [ -n "$nextisclean" ]; then
489 # We must manually purge the unclean refs now as even prune won't do it
490 git for-each-ref --format='%(refname)' |
491 LC_ALL=C sed \
492 -e '/^refs\/heads\//d' \
493 -e '/^refs\/tags\//d' \
494 -e '/^refs\/notes\//d' \
495 -e '/^refs\/top-bases\//d' \
496 -e 's/^/delete /' |
497 git_updateref_stdin
499 git config --bool girocco.lastupdateclean ${nextisclean:-0}
501 if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
502 find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
503 rm -f .gfipack
506 esac
508 # The objects subdirectories permissions must be updated now.
509 # In the case of a dumb http clone, the permissions will not be correct
510 # (missing group write) despite the core.sharedrepository=1 setting!
511 # The objects themselves seem to have the correct permissions.
512 # This problem appears to have been fixed in the most recent git versions.
513 perms=g+w
514 [ "$cfg_permission_control" != "Hooks" ] || perms=go+w
515 chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :
517 # We maintain the last refresh date in two places deliberately
518 # so that it's available as part of the config data and also
519 # as a standalone file timestamp that can be accessed without git.
520 bang config_set lastrefresh "$(date "$datefmt")"
521 { >.last_refresh; } 2>/dev/null || :
523 # Check to see if any refs changed
524 check_after_refs
526 # Update server info if any refs changed (if they didn't packs shouldn't have either)
527 [ -z "$refschanged" ] || bang git update-server-info
529 # Pack all refs if any changed to keep things as efficient as possible
530 # Project mirror updates do not occur that often therefore this is a win
531 # However, if pack-refs fails for some reason, we can just ignore and continue
532 # The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
533 [ -z "$refschanged" ] || git pack-refs --all --prune || :
535 # Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
536 # at least one gfi pack present now
537 if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
538 [ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
539 >.needsgc
542 # Activate a mini-gc if needed
543 check_and_set_needsgc
545 # Look at which refs changed and trigger ref-change for these
546 sockpath="$cfg_chroot/etc/taskd.socket"
547 if [ -n "$refschanged" ]; then
548 bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
549 # We always use UTC for the log timestamp so that chroot and non-chroot match up.
550 # We don't have to worry about multiple log files since only one update runs
551 lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
552 loghhmmss="${lognamets##*_}"
553 logname="reflogs/${lognamets%%_*}"
554 # We freshen the mod time to now on any old or new ref that is a loose object
555 # For old refs we do it so we will be able to keep them around for 1 day
556 # For new refs we do it in case we are about to run gc and the new ref
557 # actually points to an oldish loose object that had been unreachable
558 # We probably do not need to do it for new refs as Git tries to do that,
559 # but since we're already doing it for old refs (which Git does not do),
560 # it's almost no extra work for new refs, just in case.
562 echo "ref-changes %$proj% $proj"
563 LC_ALL=C join .refs-before .refs-after |
564 LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
565 while read ref old new; do
566 echo "$loghhmmss $old $new $ref" >&3
567 freshen_loose_objects "$old" "$new"
568 echo "$old $new $ref"
569 done
570 LC_ALL=C join -v 1 .refs-before .refs-after |
571 while read ref old; do
572 echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
573 freshen_loose_objects "$old"
574 echo "$old 0000000000000000000000000000000000000000 $ref"
575 done
576 LC_ALL=C join -v 2 .refs-before .refs-after |
577 while read ref new; do
578 echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
579 freshen_loose_objects "$new"
580 echo "0000000000000000000000000000000000000000 $new $ref"
581 done
582 git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
583 echo "done ref-changes %$proj% $proj"
584 } >.refs-temp 3>>"$logname"
585 if [ -S "$sockpath" ]; then
586 trap ':' PIPE
587 nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
588 trap - PIPE
590 bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
591 bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
592 --count=1 refs/heads >info/lastactivity"
593 ! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
594 rm -f .delaygc .allowgc
596 [ "${cfg_autogchack:-0}" != "0" ] &&
597 [ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
598 then
599 mv -f .refs-after .refs-last
603 # If the repository does not yet have a valid HEAD symref try to set one
604 # If an empty repository was cloned and then later becomes unempty you just
605 # lose out on the fancy "symref=HEAD:" logic and get this version instead
606 check_and_set_head || :
608 rm -f .refs-before .refs-after .refs-temp FETCH_HEAD
610 if is_banged; then
611 [ -z "$mailaddrs" ] || ! was_banged_message_sent ||
613 echo "$proj update succeeded - failure recovery"
614 echo "this status message may be disabled on the project admin page"
615 } | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
616 bang_reset
619 if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
620 cat "$bang_log" >.banglog
621 echo "" >>.banglog
622 echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
625 progress "- [$proj] update ($(date))"