#!/bin/sh

. @basedir@/shlib.sh
. @basedir@/jobd/updategc-util-functions.sh

set -e
if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi
# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
git_fetch_q_progress() {
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}
# freshen_loose_objects full-sha ...
# if a passed-in full-sha is a loose object, set its modification time to now
# otherwise silently do nothing with no error. To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
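
# Illustration (comments only, not executed): for a hypothetical full sha
#   _sha=0123456789abcdef0123456789abcdef01234567
# the expansions above split it into
#   _fn=23456789abcdef0123456789abcdef01234567   (sha minus its first two chars)
#   _shard=01                                    (the first two chars)
# yielding the loose object path objects/01/23456789abcdef0123456789abcdef01234567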
# darcs fast-export | git fast-import with error handling
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
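
# A minimal sketch of the status-capture trick used above (comments only):
# fd 3 is first duplicated from stdout, then inside the $(...) substitution
# the fds are shuffled so that writes to fd 3 land in the here-document text
# while normal output still flows through the real pipeline; each side of the
# pipe then reports its own exit status on fd 3 and the two reads pick the
# statuses back up, e.g. (cmd1 and cmd2 are placeholders):
#   exec 3>&1
#   { read -r err1 || :; read -r err2 || :; } <<-EOT
#   $(
#           exec 4>&3 3>&1 1>&4 4>&-
#           { cmd1 3>&-; echo $? >&3; } | { cmd2 3>&-; echo $? >&3; }
#   )
#   EOT
#   exec 3>&-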
# bzr fast-export | git fast-import with error handling
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
# On return a "$lockf" will have been created that must be removed when the update is done
lock_update() {
	v_lock_file _lockresult "update.pid" || {
		echo >&2 "[$proj] $_lockresult"
		exit 1
	}
	lockf="$_lockresult"
}
# output all loose object ids, one per line, to stdout INCLUDING "/" shard separator
# look in "$1" (default "objects")
list_loose_sharded_objects() (
	cd "${1:-objects}" || return 1
	objdirs="$(echo $octet)"
	[ "$objdirs" != "$octet" ] || return 0
	find -L $objdirs -mindepth 1 -maxdepth 1 -type f -name "$octet19*" -print
)
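
# For example, a repository containing one loose object (the well-known empty
# tree) would produce exactly this line on stdout:
#   4b/825dc642cb6eb9a060e54bf8d69288fbee4904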
# Migrate any and all objects/packs/ref-changes from $incoming_fetch/$incoming_objs
# Does NOT do anything with reflogs, those should already be taken care of elsewhere
unquarantine_updates() {
	# just make sure everything is copacetic first
	[ -n "$incoming_fetch" ] && [ -d "$incoming_fetch" ] &&
	[ -n "$incoming_objs" ] && [ -d "$incoming_objs" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}
	_ifd="$(git --git-dir="$incoming_fetch" rev-parse --git-dir 2>/dev/null)" || :
	[ -z "$_ifd" ] || _ifd="$(cd "$_ifd" && pwd -P)"
	[ -n "$_ifd" ] && [ "$_ifd" = "$incoming_fetch" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}

	# both $incoming_fetch and . must have all their refs packed
	git --git-dir="$incoming_fetch" pack-refs --all --prune
	git --git-dir=. pack-refs --all --prune

	# now every loose object and pack must be migrated out of quarantine
	_objd="$(cd "$PWD/objects" && pwd -P)"
	# packs first
	if [ -d "$incoming_objs/pack" ]; then
		if [ ! -d "$_objd/pack" ]; then
			mkdir -p "$_objd/pack"
			chmod "$dperms" "$_objd/pack" >/dev/null 2>&1 || :
		fi
		while read -r _pckf && [ -n "${_pckf%.pack}" ]; do
			_pckf="${_pckf%.pack}"
			rename_pack "$_pckf" "$_objd/pack/${_pckf##*/}"
			chmod "$fperms" "$_objd/pack/${_pckf##*/}".?* >/dev/null 2>&1 || :
		done <<LIST_PACKS
$(list_packs --exclude-no-idx "$incoming_objs/pack")
LIST_PACKS
	fi
	# now loose objects
	# (use a subshell for loose objects as there could potentially be many
	# whereas there will normally be just one pack)
	list_loose_sharded_objects "$incoming_objs" |
	while read -r _objf && [ -n "$_objf" ] && [ "${#_objf}" -ge 41 ]; do
		if [ ! -d "$_objd/${_objf%%/*}" ]; then
			mkdir -p "$_objd/${_objf%%/*}"
			chmod "$dperms" "$_objd/${_objf%%/*}" >/dev/null 2>&1 || :
		fi
		ln "$incoming_objs/$_objf" "$_objd/$_objf" >/dev/null 2>&1 ||
		dupe_file "$incoming_objs/$_objf" "$_objd/$_objf" "tmp_obj_" >/dev/null 2>&1 ||
		[ -f "$_objd/$_objf" ] || {
			echo >&2 "[$proj] unable to unquarantine object $_objf"
			exit 1
		}
		chmod "$fperms" "$_objd/$_objf" >/dev/null 2>&1 || :
		rm -f "$incoming_objs/$_objf"
	done || exit 1

	# now the refs
	# simply replace the packed-refs file
	# but do it atomically and make sure it's on the same file system first
	rm -f "$PWD/packed-refs.$$"
	cat "$incoming_fetch/packed-refs" >"$PWD/packed-refs.$$"
	mv -f "$PWD/packed-refs.$$" "$PWD/packed-refs"
	rm -f "$PWD/packed-refs.$$"
}
# Create a "quarantine" area to fetch into
# This is set up similarly to the way the "repack" directory is set
# up for gc in that it's a subdirectory that's a whole "git" directory
# but it uses the existing objects directory as an alternate and its
# own objects subdirectory is a symlink to a subdirectory of the real
# objects directory (to guarantee that packs/objects can be moved rather
# than copied). It starts out with a copy of all of the project's refs.
# A successful fetch will "unquarantine" fetched objects/packs + ref changes
create_quarantine() {
	incoming_objs="$(mktemp -d "$PWD/objects/incoming-XXXXXX")"
	incoming_objs="$(cd "$incoming_objs" && pwd -P)"
	chmod "$dperms" "$incoming_objs"
	mkdir "$incoming_objs/pack"
	mkdir "$incoming_objs/info"
	printf '%s\n' "$PWD/objects" >"$incoming_objs/info/alternates"
	incoming_fetch="$(mktemp -d "$PWD/incoming-XXXXXX")"
	incoming_fetch="$(cd "$incoming_fetch" && pwd -P)"
	chmod "$dperms" "$incoming_fetch"
	ln -s "$incoming_objs" "$incoming_fetch/objects"
	mkdir "$incoming_fetch/refs"
	ln -s "$PWD/config" "$incoming_fetch/config"
	git for-each-ref --format='%(objectname) %(refname)' >"$incoming_fetch/packed-refs"
	cat HEAD >"$incoming_fetch/HEAD"
	# Make sure the incoming packed-refs file is properly peeled
	git --git-dir="$incoming_fetch" pack-refs --all --prune
	# link to svn if it exists
	[ ! -d svn ] || ln -s "$PWD/svn" "$incoming_fetch/svn"
	use_quarantine=1
}
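
# After create_quarantine the layout looks roughly like this (illustrative
# names; mktemp picks the actual XXXXXX suffixes):
#   $PWD/incoming-AbCdEf/           <- $incoming_fetch, a minimal "git dir"
#   $PWD/incoming-AbCdEf/objects    -> symlink to $incoming_objs
#   $PWD/incoming-AbCdEf/config     -> symlink to $PWD/config
#   $PWD/objects/incoming-GhIjKl/   <- $incoming_objs, alternates -> $PWD/objects
# Since quarantined packs/objects already live under the real objects
# directory, unquarantine can rename/hard-link them into place instead of
# copying.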
219 [ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }
221 if [ "$cfg_permission_control" != "Hooks" ]; then
222 umask 002
223 fperms=0664
224 dperms=2775
225 else
226 umask 000
227 fperms=0666
228 dperms=2777

clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invocation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build up.
check_and_set_needsgc

use_quarantine=
bang_log=
incoming_fetch=
incoming_objs=
lockf=
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$incoming_fetch" ] || rm -rf "$incoming_fetch"
	[ -z "$incoming_objs" ] || rm -rf "$incoming_objs"
	[ -z "$bang_log" ] || rm -f "$bang_log"
	[ -z "$lockf" ] || rm -f "$lockf"
}
bang_exit() { cleanup_exit; }
trap 'cleanup_exit' EXIT
trap 'exit 129' HUP
trap 'exit 130' INT
trap 'exit 131' QUIT
trap 'exit 134' ABRT
trap 'exit 141' PIPE
trap 'exit 142' ALRM
trap 'exit 143' TERM
268 if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
269 progress "= [$proj] update skip (last at $(config_get lastrefresh))"
270 exit 0
272 if [ -e .nofetch ]; then
273 progress "x [$proj] update disabled (.nofetch exists)"
274 exit 0
276 lock_update
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# Remove any stale ref locks
clear_stale_ref_locks

# Remove any stale incoming-* object quarantine directories that are
# more than 12 hours old. These are new with Git >= 2.11.0.
# But we also create our own during the fetch process as Git's quarantine
# only applies to incoming receive-pack which we imitate for our fetch.
find -L . objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
	-exec rm -rf '{}' + || :

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted, we do not even
# need to check how old they are. A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}
320 bang echo "Project: $proj"
321 bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
322 bang echo ""
323 mail="$(config_get owner)" || :
324 url="$(config_get baseurl)" || :
325 case "$url" in *" "*|*" "*|"")
326 bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
327 exit 1
328 esac
329 bang echo "Mirroring from URL \"$url\""
330 bang echo ""
331 statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
332 mailaddrs=
333 [ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
334 [ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
335 if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'
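
# The net effect of pinning all three keys: the effective unpack limit is 1
# no matter which of fetch.unpackLimit or transfer.unpackLimit a given Git
# version consults, and fastimport.unpackLimit=0 compensates for fast-import's
# "<=" (rather than "<") comparison so it never unpacks into loose objects.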

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi
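
# Illustration (not executed): loose objects fetched after this point can be
# identified later simply by comparing against the marker's timestamp, e.g.
#   find -L objects/?? -maxdepth 1 -type f -newer .needspack
# the same "-newer" marker-file trick the .gfipack handling below uses to
# spot new fast-import packs.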
369 case "$url" in
370 svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
371 [ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
372 # Allow the username to be specified in the "svn-credential.svn.username"
373 # property and the password in the "svn-credential.svn.password" property
374 # Use an 'anonsvn' username by default as is commonly used for anonymous svn
375 # Default the password to the same as the username
376 # The password property will be ignored unless a username has been specified
377 if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
378 if ! svnpass="$(git config --get svn-credential.svn.password)"; then
379 svnpass="$svnuser"
381 url1="${url#*://}"
382 url1="${url1%%/*}"
383 case "$url1" in ?*"@"?*)
384 urlsch="${url%%://*}"
385 url="$urlsch://${url#*@}"
386 esac
387 else
388 # As a fallback, check in the URL, just in case
389 url1="${url#*://}"
390 url1="${url1%%/*}"
391 svnuser=
392 case "$url1" in ?*"@"?*)
393 urlsch="${url%%://*}"
394 url="$urlsch://${url#*@}"
395 url1="${url1%%@*}"
396 svnuser="${url1%%:*}"
397 if [ -n "$svnuser" ]; then
398 svnpass="$svnuser"
399 case "$url1" in *":"*)
400 svnpass="${url1#*:}"
401 esac
403 esac
404 if [ -z "$svnuser" ]; then
405 svnuser="anonsvn"
406 svnpass="anonsvn"
409 GIT_ASKPASS_PASSWORD="$svnpass"
410 export GIT_ASKPASS_PASSWORD
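
	# Worked example (comments only): with no svn-credential.* config set and
	#   url="svn+https://alice:s3cret@svn.example.com/repo"
	# the fallback parsing above yields
	#   url1="alice:s3cret"  svnuser="alice"  svnpass="s3cret"
	#   url="svn+https://svn.example.com/repo"  (credentials stripped)
	# and if neither the config nor the URL supplies credentials it ends up
	# with the anonsvn/anonsvn defaults.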
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes. See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# the only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL. However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
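		# Worked example (comments only): if the fresh init produced
		#   svn-remote.svn.fetch = code/trunk:refs/remotes/trunk
		# then the three expansions above compute
		#   gitsvnprefixnew="code/trunk" -> gitsvnsuffixnew="trunk" -> gitsvnprefixnew="code/"
		# i.e. everything before the final path component is the prefix.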
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
444 if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
445 # The url has been changed.
446 # We must update the url and replace the prefix on all config items
447 gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
448 gitsvnprefixold="${gitsvnfetch%%:*}"
449 gitsvnsuffixold="${gitsvnprefixold##*/}"
450 gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
451 git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
452 git config 'svn-remote.svnnew.url' "$gitsvnurl"
453 git config --get-regexp '^svn-remote\.svn\.' |
454 while read -r sname sval; do
455 case "$sname" in
456 svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
457 sname="${sname#svn-remote.svn.}"
458 sval="${sval#$gitsvnprefixold}"
459 bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
460 esac
461 done
462 test $? -eq 0
463 bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
464 bang git config --remove-section svn-remote.svn
465 bang git config --rename-section svn-remote.svnnew svn-remote.svn
467 bang config_set svnurl "$svnurl"
	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
	export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	saveconfig="$GIT_CONFIG_PARAMETERS"
	git_add_config 'gc.auto=1'
	git_add_config 'gc.autoPackLimit=1'
	GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
	GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
	export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIT_CONFIG_PARAMETERS
	[ -z "$saveconfig" ] || {
		GIT_CONFIG_PARAMETERS="$saveconfig"
		export GIT_CONFIG_PARAMETERS
	}
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
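	# For example (illustrative), a leftover ref such as
	#   refs/remotes/trunk@1234
	# matches the sed filter and becomes the line "delete refs/remotes/trunk@1234"
	# on git_updateref_stdin's stdin, removing it.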
	unset GIT_ASKPASS_PASSWORD
	;;
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
*)
522 [ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
523 pruneopt=--prune
524 [ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
525 if ! is_gfi_mirror_url "$url"; then
526 lastwasclean=
527 [ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
528 nextisclean=
529 [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
530 if [ "$nextisclean" != "$lastwasclean" ]; then
531 if [ -n "$nextisclean" ]; then
532 git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
533 git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
534 git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
535 git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
536 else
537 git config --replace-all remote.origin.fetch "+refs/*:refs/*"
		if ! create_quarantine; then
			bang echo ""
			bang echo "unable to create fetch quarantine area"
			bang_cmd="create_quarantine"
			bang_errcode=1
			bang_failed
		fi
	fi

	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
		rm -f .gfipack
		>.gfipack
		sleep 1
	fi
555 fetcharg="default"
556 git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
557 GIT_DIR=.
558 [ -z "$use_quarantine" ] || GIT_DIR="$incoming_fetch"
559 fetchcmd="git_ulimit fetch"
560 [ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
561 if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
562 # git fetch learned --progress in v1.7.1
563 case "$show_progress" in
564 [2-9]*|1[0-9]*)
565 # full volume progress with all the spammy noise
566 fetchcmd="git_ulimit fetch --progress"
569 # a kinder, gentler progress that doesn't leave one
570 # covered all over in exploded bits of spam afterwards
571 fetchcmd="git_fetch_q_progress"
573 esac

	# It's possible for a fetch to actually do something while still returning
	# a non-zero result (perhaps some of the refs were updated but some were
	# not -- a malicious Git-impersonation trying to set refs/heads/... refs
	# to non-commit objects for example).
	GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
	unset GIT_SSL_NO_VERIFY
	# If we did fetch anything, don't treat it as an error, but do keep the log;
	# otherwise invoke bang_failed as for a normal failure
	if [ "${bang_errcode:-0}" != "0" ]; then
		save_bang_errcode="$bang_errcode"
		check_after_refs
		if [ -n "$refschanged" ]; then
			keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		else
			bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
			bang_errcode="$save_bang_errcode"
			bang_failed
		fi
	fi
594 if [ -n "$use_quarantine" ]; then
595 check_after_refs
596 unset GIT_DIR
597 if [ -n "$refschanged" ] && ! unquarantine_updates; then
598 bang echo ""
599 bang echo "unable to unquarantine fetched updates"
600 bang_cmd="unquarantine_updates"
601 bang_errcode=1
602 bang_failed
604 else
605 unset GIT_DIR
	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
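		# For example (illustrative), when switching to a clean mirror this
		# deletes refs like refs/pull/123/head or refs/changes/45/12345/2,
		# since only heads, tags, notes and top-bases survive the filter above.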
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac

# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
check_after_refs

# Update server info if any refs changed (if they didn't packs shouldn't have either)
[ -z "$refschanged" ] || bang git update-server-info

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	{
		echo "ref-changes %$proj% $proj"
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
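	# Each line written to fd 3 above lands in "$logname" and has the form
	# (illustrative values, <oldhash>/<newhash> standing in for full shas):
	#   143122 <oldhash> <newhash> refs/heads/master
	# i.e. the HHMMSS portion of the log file's own name, old hash, new hash, ref.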
705 if [ -S "$sockpath" ]; then
706 trap ':' PIPE
707 nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
708 trap - PIPE
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc
	if
	[ "${cfg_autogchack:-0}" != "0" ] &&
	[ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
	then
		mv -f .refs-after .refs-last
	fi
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi
739 if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
740 cat "$bang_log" >.banglog
741 echo "" >>.banglog
742 echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
745 progress "- [$proj] update ($(date))"