# Source: girocco.git, path jobd/update.sh
# (gitweb capture; commit "updategc-util-functions.sh: parametize dupe_file temp name")
# blob 80e5edc0049f00b04b6aa4d67f2280e3f206a6c4
#!/bin/sh

# jobd/update.sh - update (fetch) a single mirrored project.
# Usage: update.sh projname
# NOTE(review): reconstructed from a garbled gitweb capture (stray blob
# line-number prefixes; some blank and closing lines were dropped) --
# verify against the canonical jobd/update.sh before deployment.

. @basedir@/shlib.sh
. @basedir@/jobd/updategc-util-functions.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
# git_fetch_q_progress - run the quiet-progress fetch helper script.
# When cfg_max_file_size512 is non-zero, wrap the git binary in ulimit512
# so no single file written by the fetch can exceed that many 512-byte
# blocks (the helper honors $GIT_BIN when set).
git_fetch_q_progress() {
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}
# freshen_loose_objects full-sha ...
# if "$n" is a loose object, set its modification time to now
# otherwise silently do nothing with no error. To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	# Map each full sha to its loose-object path: objects/xx/yyyy...
	for _sha; do
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		# $_list is deliberately unquoted: word-splitting yields one
		# path per sha. Missing objects are silently ignored
		# (touch -c plus the suppressed stderr / forced success).
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
# darcs fast-export | git fast-import with error handling
# Runs in a subshell. Because $? of a pipeline only reflects its last
# stage, both stages echo their own exit status onto fd 3, and those two
# numbers are read back through the here-doc's command substitution.
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<EOT
$(
	exec 4>&3 3>&1 1>&4 4>&-
	{
		_e1=0
		"$cfg_basedir"/bin/darcs-fast-export \
			--export-marks="$(pwd)/dfe-marks" \
			--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
		echo $_e1 >&3
	} |
	{
		_e2=0
		git_ulimit fast-import \
			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?
		echo $_e2 >&3
	}
)
EOT
	exec 3>&-
	# Success only if BOTH darcs-fast-export and git fast-import exited 0
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
# bzr fast-export | git fast-import with error handling
# Same fd-juggling technique as git_darcs_fetch: each pipeline stage
# echoes its exit status onto fd 3 and the two values are read back
# through the here-doc's command substitution.
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<EOT
$(
	exec 4>&3 3>&1 1>&4 4>&-
	{
		_e1=0
		bzr fast-export --plain \
			--export-marks="$(pwd)/bfe-marks" \
			--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
		echo $_e1 >&3
	} |
	{
		_e2=0
		git_ulimit fast-import \
			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?
		echo $_e2 >&3
	}
)
EOT
	exec 3>&-
	# Success only if BOTH bzr fast-export and git fast-import exited 0
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
# On return a "$lockf" will have been created that must be removed when gc is done
# Exits (status 1) with a message on stderr if the lock cannot be obtained.
lock_update() {
	v_lock_file _lockresult "update.pid" || {
		echo >&2 "[$proj] $_lockresult"
		exit 1
	}
	lockf="$_lockresult"
}
[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }

# Group-writable repositories by default; fully open when permission
# control is delegated to hooks.
umask 002
dperms=2775
[ "$cfg_permission_control" != "Hooks" ] || { umask 000; dperms=2777; }
clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invokation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build up.
check_and_set_needsgc

# State consumed by cleanup_exit; populated as resources are created.
bang_log=
incoming_fetch=
incoming_objs=
lockf=
# cleanup_exit - EXIT trap handler.
# Reports failure (non-zero exit status) to stderr and removes any
# quarantine directories, bang log and lock file created this run.
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$incoming_fetch" ] || rm -rf "$incoming_fetch"
	[ -z "$incoming_objs" ] || rm -rf "$incoming_objs"
	[ -z "$bang_log" ] || rm -f "$bang_log"
	[ -z "$lockf" ] || rm -f "$lockf"
}
# Install cleanup handler plus fatal-signal traps (exit 128+signum)
trap 'cleanup_exit' EXIT
trap 'exit 129' HUP
trap 'exit 130' INT
trap 'exit 131' QUIT
trap 'exit 134' ABRT
trap 'exit 141' PIPE
trap 'exit 142' ALRM
trap 'exit 143' TERM

# Skip if the minimum mirror interval has not yet elapsed
# (unless a forced update was requested via $force_update)
if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"
	exit 0
fi
if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"
	exit 0
fi
lock_update
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# Remove any stale ref locks
clear_stale_ref_locks
# Remove any stale incoming-* object quarantine directories that are
# more than 12 hours old. These are new with Git >= 2.11.0.
# But we also create our own during the fetch process as Git's quarantine
# only applies to incoming receive-pack which we imitate for our fetch.
find -L . objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
	-exec rm -rf '{}' + || :

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted, we do not even
# need to check how old they are. A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

# Create a "quarantine" area to fetch into
# This is set up similarly to the way the "repack" directory is set
# up for gc in that it's a subdirectory that's a whole "git" directory
# but it uses the existing objects directory as an alternate and its
# own objects subdirectory is a symlink to a subdirectory of the real
# objects directory (to guarantee that packs/objects can be moved rather
# than copied). It starts out with a copy of all of the project's refs.
# A successful fetch will "unquarantine" fetched objects/packs + ref changes
incoming_objs="$(mktemp -d "$PWD/objects/incoming-XXXXXX")"
incoming_objs="$(cd "$incoming_objs" && pwd -P)"
chmod "$dperms" "$incoming_objs"
mkdir "$incoming_objs/pack"
mkdir "$incoming_objs/info"
printf '%s\n' "$PWD/objects" >"$incoming_objs/info/alternates"
incoming_fetch="$(mktemp -d "$PWD/incoming-XXXXXX")"
incoming_fetch="$(cd "$incoming_fetch" && pwd -P)"
chmod "$dperms" "$incoming_fetch"
ln -s "$incoming_objs" "$incoming_fetch/objects"
mkdir "$incoming_fetch/refs"
ln -s "$PWD/config" "$incoming_fetch/config"
git for-each-ref --format='%(objectname) %(refname)' >"$incoming_fetch/packed-refs"
cat HEAD >"$incoming_fetch/HEAD"
# Make sure the incoming packed-refs file is properly peeled
git --git-dir="$incoming_fetch" pack-refs --all --prune
# link to svn if it exists
[ ! -d svn ] || ln -s "$PWD/svn" "$incoming_fetch/svn"

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
# bang_trap - invoked by the "bang" machinery; $1 is non-empty on failure.
# On failure, bump the recorded lastrefresh so retries are throttled.
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}
bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
# Reject mirror URLs that are empty or contain whitespace
# (second pattern contains a literal tab; the capture collapsed it to a space)
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
bang echo "Mirroring from URL \"$url\""
bang echo ""
statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
mailaddrs=
[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
# Cc the admin when cfg_admincc is enabled
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

# Snapshot the pre-fetch ref state for later comparison
bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"
# check_after_refs - capture the post-fetch ref state (at most once) and
# set refschanged=1 if it differs from the .refs-before snapshot.
check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}
# A pending .delaygc now allows gc to proceed once this update is done
! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi
# Dispatch on the mirror source URL scheme; each branch performs the fetch.
# The final "*)" branch handles native git transports.
case "$url" in
svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
	[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
	# Allow the username to be specified in the "svn-credential.svn.username"
	# property and the password in the "svn-credential.svn.password" property
	# Use an 'anonsvn' username by default as is commonly used for anonymous svn
	# Default the password to the same as the username
	# The password property will be ignored unless a username has been specified
	if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
		if ! svnpass="$(git config --get svn-credential.svn.password)"; then
			svnpass="$svnuser"
		fi
		url1="${url#*://}"
		url1="${url1%%/*}"
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
		esac
	else
		# As a fallback, check in the URL, just in case
		url1="${url#*://}"
		url1="${url1%%/*}"
		svnuser=
		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"
			url1="${url1%%@*}"
			svnuser="${url1%%:*}"
			if [ -n "$svnuser" ]; then
				svnpass="$svnuser"
				case "$url1" in *":"*)
					svnpass="${url1#*:}"
				esac
			fi
		esac
		if [ -z "$svnuser" ]; then
			svnuser="anonsvn"
			svnpass="anonsvn"
		fi
	fi
	GIT_ASKPASS_PASSWORD="$svnpass"
	export GIT_ASKPASS_PASSWORD
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes. See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# the only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL. However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
		if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
			# The url has been changed.
			# We must update the url and replace the prefix on all config items
			gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
			gitsvnprefixold="${gitsvnfetch%%:*}"
			gitsvnsuffixold="${gitsvnprefixold##*/}"
			gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
			git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
			git config 'svn-remote.svnnew.url' "$gitsvnurl"
			git config --get-regexp '^svn-remote\.svn\.' |
			while read -r sname sval; do
				case "$sname" in
				svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
					sname="${sname#svn-remote.svn.}"
					sval="${sval#$gitsvnprefixold}"
					bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
				esac
			done
			test $? -eq 0
			bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
			bang git config --remove-section svn-remote.svn
			bang git config --rename-section svn-remote.svnnew svn-remote.svn
		fi
		bang config_set svnurl "$svnurl"
	fi
	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
	export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	saveconfig="$GIT_CONFIG_PARAMETERS"
	git_add_config 'gc.auto=1'
	git_add_config 'gc.autoPackLimit=1'
	GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
	GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
	export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIT_CONFIG_PARAMETERS
	[ -z "$saveconfig" ] || {
		GIT_CONFIG_PARAMETERS="$saveconfig"
		export GIT_CONFIG_PARAMETERS
	}
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
	unset GIT_ASKPASS_PASSWORD
	;;
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
*)
	[ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
	pruneopt=--prune
	[ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
	if ! is_gfi_mirror_url "$url"; then
		lastwasclean=
		[ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
		nextisclean=
		[ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
		if [ "$nextisclean" != "$lastwasclean" ]; then
			if [ -n "$nextisclean" ]; then
				git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
				git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
				git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
				git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
			else
				git config --replace-all remote.origin.fetch "+refs/*:refs/*"
			fi
		fi
	fi

	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
		rm -f .gfipack
		>.gfipack
		sleep 1
	fi
	fetcharg="default"
	git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
	fetchcmd="git_ulimit fetch"
	[ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
	if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
		# git fetch learned --progress in v1.7.1
		case "$show_progress" in
		[2-9]*|1[0-9]*)
			# full volume progress with all the spammy noise
			fetchcmd="git_ulimit fetch --progress"
			;;
		*)
			# a kinder, gentler progress that doesn't leave one
			# covered all over in exploded bits of spam afterwards
			fetchcmd="git_fetch_q_progress"
			;;
		esac
	fi
	# It's possible for a fetch to actually do something while still returning
	# a non-zero result (perhaps some of the refs were updated but some were
	# not -- a malicious Git-impersonation trying to set refs/heads/... refs
	# to non-commit objects for example).
	GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
	# If we did fetch anything, don't treat it as an error, but do keep the log;
	# otherwise invoke bang_failed as for a normal failure
	if [ "${bang_errcode:-0}" != "0" ]; then
		save_bang_errcode="$bang_errcode"
		check_after_refs
		if [ -n "$refschanged" ]; then
			keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		else
			bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
			bang_errcode="$save_bang_errcode"
			bang_failed
		fi
	fi
	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac
# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :
# Check to see if any refs changed
check_after_refs

# Update server info if any refs changed (if they didn't packs shouldn't have either)
[ -z "$refschanged" ] || bang git update-server-info

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc
# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	# stdout of this group becomes the taskd notification payload;
	# fd 3 appends the reflog entries.
	{
		echo "ref-changes %$proj% $proj"
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
	if [ -S "$sockpath" ]; then
		trap ':' PIPE
		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
		trap - PIPE
	fi
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc
	if
		[ "${cfg_autogchack:-0}" != "0" ] &&
		[ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
	then
		mv -f .refs-after .refs-last
	fi
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD
# If a previous run "banged" (failed), this run succeeding means recovery:
# notify the owner (if a failure message had been sent) and clear the state.
if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi

# Preserve the fetch failure log when the fetch errored but refs still changed
if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
	cat "$bang_log" >.banglog
	echo "" >>.banglog
	echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
fi

progress "- [$proj] update ($(date))"