update.sh: add unquarantine_updates function
jobd/update.sh
#!/bin/sh

. @basedir@/shlib.sh
. @basedir@/jobd/updategc-util-functions.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is Linux-only, the POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
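# e.g. "$(date "$datefmt")" produces something like: Mon, 01 Jan 2024 12:34:56 +0000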

git_fetch_q_progress() {
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"
}
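# (cfg_max_file_size512 is presumably expressed in 512-byte blocks, the
# traditional "ulimit -f" unit; running git via the ulimit512 wrapper then
# caps the size of any single file git can create during the fetch)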

# freshen_loose_objects full-sha ...
# if a full-sha names a loose object, set its modification time to now
# otherwise silently do nothing with no error. To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
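# For example, a sha of "d670460b4b4aece5915caf5c68d12f560a9fe3e4" expands to
# "objects/d6/70460b4b4aece5915caf5c68d12f560a9fe3e4" -- the first two hex
# digits become the shard directory and the remaining 38 the file name.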

# darcs fast-export | git fast-import with error handling
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
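# How the exit-status plumbing above works: the command substitution feeding
# the here-document swaps file descriptors so that fd 3 points into the
# capture while stdout is restored to the real stdout. Each side of the pipe
# then writes only its exit code to fd 3, so the here-document ends up holding
# exactly two lines -- one exit code per pipeline stage -- which the two
# "read" commands pick up as _err1 and _err2. This sidesteps the usual
# problem that a pipeline reports only the status of its last command.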

# bzr fast-export | git fast-import with error handling
# (same exit-status capture technique as git_darcs_fetch above)
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git_ulimit fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
	EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

# On return a "$lockf" will have been created that must be removed when the update is done
lock_update() {
	v_lock_file _lockresult "update.pid" || {
		echo >&2 "[$proj] $_lockresult"
		exit 1
	}
	lockf="$_lockresult"
}

# output all loose object ids, one per line, to stdout INCLUDING the "/" shard separator
# look in "$1" (default "objects")
list_loose_sharded_objects() (
	cd "${1:-objects}" || return 1
	objdirs="$(echo $octet)"
	[ "$objdirs" != "$octet" ] || return 0
	find -L $objdirs -mindepth 1 -maxdepth 1 -type f -name "$octet19*" -print
)
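# ($octet and $octet19 are presumably glob patterns from shlib.sh matching one
# and nineteen hex octets respectively, so output lines look like
# "d6/70460b4b4aece5915caf5c68d12f560a9fe3e4"; when no shard directory exists
# the $octet glob stays unexpanded and the function outputs nothing)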

# Migrate any and all objects/packs/ref-changes from $incoming_fetch/$incoming_objs
# Does NOT do anything with reflogs, those should already be taken care of elsewhere
unquarantine_updates() {
	# just make sure everything is copacetic first
	[ -n "$incoming_fetch" ] && [ -d "$incoming_fetch" ] &&
	[ -n "$incoming_objs" ] && [ -d "$incoming_objs" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}
	_ifd="$(git --git-dir="$incoming_fetch" rev-parse --git-dir 2>/dev/null)" || :
	[ -z "$_ifd" ] || _ifd="$(cd "$_ifd" && pwd -P)"
	[ -n "$_ifd" ] && [ "$_ifd" = "$incoming_fetch" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}

	# both $incoming_fetch and . must have all their refs packed
	git --git-dir="$incoming_fetch" pack-refs --all --prune
	git --git-dir=. pack-refs --all --prune

	# now every loose object and pack must be migrated out of quarantine
	_objd="$(cd "$PWD/objects" && pwd -P)"
	# packs first
	if [ -d "$incoming_objs/pack" ]; then
		if [ ! -d "$_objd/pack" ]; then
			mkdir -p "$_objd/pack"
			chmod "$dperms" "$_objd/pack" >/dev/null 2>&1 || :
		fi
		while read -r _pckf && [ -n "${_pckf%.pack}" ]; do
			_pckf="${_pckf%.pack}"
			rename_pack "$_pckf" "$_objd/pack/${_pckf##*/}"
			chmod "$fperms" "$_objd/pack/${_pckf##*/}".?* >/dev/null 2>&1 || :
		done <<LIST_PACKS
$(list_packs --exclude-no-idx "$incoming_objs/pack")
LIST_PACKS
	fi
	# now loose objects
	# (use a subshell for loose objects as there could potentially be many
	# whereas there will normally be just one pack)
	list_loose_sharded_objects "$incoming_objs" |
	while read -r _objf && [ -n "$_objf" ] && [ "${#_objf}" -ge 41 ]; do
		if [ ! -d "$_objd/${_objf%%/*}" ]; then
			mkdir -p "$_objd/${_objf%%/*}"
			chmod "$dperms" "$_objd/${_objf%%/*}" >/dev/null 2>&1 || :
		fi
		ln "$incoming_objs/$_objf" "$_objd/$_objf" >/dev/null 2>&1 ||
		dupe_file "$incoming_objs/$_objf" "$_objd/$_objf" "tmp_obj_" >/dev/null 2>&1 ||
		[ -f "$_objd/$_objf" ] || {
			echo >&2 "[$proj] unable to unquarantine object $_objf"
			exit 1
		}
		chmod "$fperms" "$_objd/$_objf" >/dev/null 2>&1 || :
		rm -f "$incoming_objs/$_objf"
	done || exit 1
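	# (note the "|| exit 1" above: the while loop runs in a subshell because
	# it is downstream of a pipe, so the "exit 1" inside it only leaves the
	# subshell; the "|| exit 1" re-raises that failure in the function itself)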

	# now the refs
	# simply replace the packed-refs file
	# but do it atomically and make sure it's on the same file system first
	rm -f "$PWD/packed-refs.$$"
	cat "$incoming_fetch/packed-refs" >"$PWD/packed-refs.$$"
	mv -f "$PWD/packed-refs.$$" "$PWD/packed-refs"
	rm -f "$PWD/packed-refs.$$"
}
189 [ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }
191 if [ "$cfg_permission_control" != "Hooks" ]; then
192 umask 002
193 fperms=0664
194 dperms=2775
195 else
196 umask 000
197 fperms=0666
198 dperms=2777
200 clean_git_env
202 proj="${1%.git}"
203 cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invocation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build up.
check_and_set_needsgc

bang_log=
incoming_fetch=
incoming_objs=
lockf=
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$incoming_fetch" ] || rm -rf "$incoming_fetch"
	[ -z "$incoming_objs" ] || rm -rf "$incoming_objs"
	[ -z "$bang_log" ] || rm -f "$bang_log"
	[ -z "$lockf" ] || rm -f "$lockf"
}
trap 'cleanup_exit' EXIT
trap 'exit 129' HUP
trap 'exit 130' INT
trap 'exit 131' QUIT
trap 'exit 134' ABRT
trap 'exit 141' PIPE
trap 'exit 142' ALRM
trap 'exit 143' TERM
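# (the exit codes above mirror the shell's own 128+signal-number convention,
# e.g. 130 = 128 + SIGINT(2), so callers see the same status a signal death
# would have produced while the EXIT trap still runs cleanup_exit)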
236 if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
237 progress "= [$proj] update skip (last at $(config_get lastrefresh))"
238 exit 0
240 if [ -e .nofetch ]; then
241 progress "x [$proj] update disabled (.nofetch exists)"
242 exit 0
244 lock_update
245 progress "+ [$proj] update ($(date))"
247 # Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
248 rm -f FETCH_HEAD
250 # Remove any stale ref locks
251 clear_stale_ref_locks

# Remove any stale incoming-* object quarantine directories that are
# more than 12 hours old. These are new with Git >= 2.11.0.
# But we also create our own during the fetch process as Git's quarantine
# only applies to incoming receive-pack which we imitate for our fetch.
find -L . objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
	-exec rm -rf '{}' + || :

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted, we do not even
# need to check how old they are. A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]
273 # Create a "quarantine" area to fetch into
274 # This is set up similarly to the way the "repack" directory is set
275 # up for gc in that it's a subdirectory that's a whole "git" directory
276 # but it uses the existing objects directory as an alternate and its
277 # own objects subdirectory is a symlink to a subdirectory of the real
278 # objects directory (to guarantee that packs/objects can be moved rather
279 # than copied). It starts out with a copy of all of the project's refs.
280 # A successful fetch will "unquarantine" fetched objects/packs + ref changes
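# The code below thus produces a layout along these lines:
#
#   $PWD/incoming-XXXXXX/           <- $incoming_fetch, a minimal "git dir"
#       objects -> $PWD/objects/incoming-XXXXXX
#       refs/
#       config -> $PWD/config       (symlink)
#       HEAD, packed-refs           (copies of the real repository's refs)
#   $PWD/objects/incoming-XXXXXX/   <- $incoming_objs
#       pack/
#       info/alternates             (points at $PWD/objects)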
281 incoming_objs="$(mktemp -d "$PWD/objects/incoming-XXXXXX")"
282 incoming_objs="$(cd "$incoming_objs" && pwd -P)"
283 chmod "$dperms" "$incoming_objs"
284 mkdir "$incoming_objs/pack"
285 mkdir "$incoming_objs/info"
286 printf '%s\n' "$PWD/objects" >"$incoming_objs/info/alternates"
287 incoming_fetch="$(mktemp -d "$PWD/incoming-XXXXXX")"
288 incoming_fetch="$(cd "$incoming_fetch" && pwd -P)"
289 chmod "$dperms" "$incoming_fetch"
290 ln -s "$incoming_objs" "$incoming_fetch/objects"
291 mkdir "$incoming_fetch/refs"
292 ln -s "$PWD/config" "$incoming_fetch/config"
293 git for-each-ref --format='%(objectname) %(refname)' >"$incoming_fetch/packed-refs"
294 cat HEAD >"$incoming_fetch/HEAD"
295 # Make sure the incoming packed-refs file is properly peeled
296 git --git-dir="$incoming_fetch" pack-refs --all --prune
297 # link to svn if it exists
298 [ ! -d svn ] || ln -s "$PWD/svn" "$incoming_fetch/svn"

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}

bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
# reject empty URLs and URLs containing spaces or tabs
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
324 bang echo "Mirroring from URL \"$url\""
325 bang echo ""
326 statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
327 mailaddrs=
328 [ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
329 [ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
330 if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi
332 bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
333 bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'
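# (git_add_config presumably appends to GIT_CONFIG_PARAMETERS, so these
# settings apply to every git invocation in this script without ever
# touching the repository's own config file)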

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi
364 case "$url" in
365 svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
366 [ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
367 # Allow the username to be specified in the "svn-credential.svn.username"
368 # property and the password in the "svn-credential.svn.password" property
369 # Use an 'anonsvn' username by default as is commonly used for anonymous svn
370 # Default the password to the same as the username
371 # The password property will be ignored unless a username has been specified
372 if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
373 if ! svnpass="$(git config --get svn-credential.svn.password)"; then
374 svnpass="$svnuser"
376 url1="${url#*://}"
377 url1="${url1%%/*}"
378 case "$url1" in ?*"@"?*)
379 urlsch="${url%%://*}"
380 url="$urlsch://${url#*@}"
381 esac
382 else
383 # As a fallback, check in the URL, just in case
384 url1="${url#*://}"
385 url1="${url1%%/*}"
386 svnuser=
387 case "$url1" in ?*"@"?*)
388 urlsch="${url%%://*}"
389 url="$urlsch://${url#*@}"
390 url1="${url1%%@*}"
391 svnuser="${url1%%:*}"
392 if [ -n "$svnuser" ]; then
393 svnpass="$svnuser"
394 case "$url1" in *":"*)
395 svnpass="${url1#*:}"
396 esac
398 esac
399 if [ -z "$svnuser" ]; then
400 svnuser="anonsvn"
401 svnpass="anonsvn"
	GIT_ASKPASS_PASSWORD="$svnpass"
	export GIT_ASKPASS_PASSWORD
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes. See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We'd better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# the only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL. However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
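		# (e.g. a fetch line of "project/trunk:refs/remotes/trunk" gives
		# gitsvnprefixnew="project/" and gitsvnsuffixnew="trunk")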
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
		if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
			# The url has been changed.
			# We must update the url and replace the prefix on all config items
			gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
			gitsvnprefixold="${gitsvnfetch%%:*}"
			gitsvnsuffixold="${gitsvnprefixold##*/}"
			gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
			git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
			git config 'svn-remote.svnnew.url' "$gitsvnurl"
			git config --get-regexp '^svn-remote\.svn\.' |
			while read -r sname sval; do
				case "$sname" in
				svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
					sname="${sname#svn-remote.svn.}"
					sval="${sval#$gitsvnprefixold}"
					bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
				esac
			done
			test $? -eq 0
			bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
			bang git config --remove-section svn-remote.svn
			bang git config --rename-section svn-remote.svnnew svn-remote.svn
		fi
		bang config_set svnurl "$svnurl"
	fi
	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
	export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	saveconfig="$GIT_CONFIG_PARAMETERS"
	git_add_config 'gc.auto=1'
	git_add_config 'gc.autoPackLimit=1'
	GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
	GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
	export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
	unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
	unset GIT_CONFIG_PARAMETERS
	[ -z "$saveconfig" ] || {
		GIT_CONFIG_PARAMETERS="$saveconfig"
		export GIT_CONFIG_PARAMETERS
	}
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
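	# (e.g. a leftover "refs/remotes/git-svn@1234" becomes the input line
	# "delete refs/remotes/git-svn@1234" for git_updateref_stdin)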
	unset GIT_ASKPASS_PASSWORD
	;;
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
517 [ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
518 pruneopt=--prune
519 [ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
520 if ! is_gfi_mirror_url "$url"; then
521 lastwasclean=
522 [ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
523 nextisclean=
524 [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
525 if [ "$nextisclean" != "$lastwasclean" ]; then
526 if [ -n "$nextisclean" ]; then
527 git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
528 git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
529 git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
530 git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
531 else
532 git config --replace-all remote.origin.fetch "+refs/*:refs/*"
536 # remember the starting time so we can easily detect new packs for fast-import mirrors
537 # we sleep for 1 second after creating .gfipack to make sure all packs are newer
538 if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
539 rm -f .gfipack
540 >.gfipack
541 sleep 1
543 fetcharg="default"
544 git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
545 fetchcmd="git_ulimit fetch"
546 [ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
547 if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
548 # git fetch learned --progress in v1.7.1
549 case "$show_progress" in
550 [2-9]*|1[0-9]*)
551 # full volume progress with all the spammy noise
552 fetchcmd="git_ulimit fetch --progress"
555 # a kinder, gentler progress that doesn't leave one
556 # covered all over in exploded bits of spam afterwards
557 fetchcmd="git_fetch_q_progress"
559 esac

	# It's possible for a fetch to actually do something while still returning
	# a non-zero result (perhaps some of the refs were updated but some were
	# not -- a malicious Git-impersonation trying to set refs/heads/... refs
	# to non-commit objects for example).
	GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
	# If we did fetch anything, don't treat it as an error, but do keep the log;
	# otherwise invoke bang_failed as for a normal failure
	if [ "${bang_errcode:-0}" != "0" ]; then
		save_bang_errcode="$bang_errcode"
		check_after_refs
		if [ -n "$refschanged" ]; then
			keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		else
			bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
			bang_errcode="$save_bang_errcode"
			bang_failed
		fi
	fi

	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac

# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
check_after_refs

# Update server info if any refs changed (if they didn't, packs shouldn't have either)
[ -z "$refschanged" ] || bang git update-server-info

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often, therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore it and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
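	# The three joins below classify the before/after ref lists (both are
	# sorted "refname objectname" lines): a plain join pairs refs present in
	# both files (the sed then drops the ones whose hash did not change),
	# "join -v 1" keeps refs only in .refs-before (deleted refs), and
	# "join -v 2" keeps refs only in .refs-after (newly created refs);
	# an all-zero hash stands in for the missing side in each case.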
654 echo "ref-changes %$proj% $proj"
655 LC_ALL=C join .refs-before .refs-after |
656 LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
657 while read ref old new; do
658 echo "$loghhmmss $old $new $ref" >&3
659 freshen_loose_objects "$old" "$new"
660 echo "$old $new $ref"
661 done
662 LC_ALL=C join -v 1 .refs-before .refs-after |
663 while read ref old; do
664 echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
665 freshen_loose_objects "$old"
666 echo "$old 0000000000000000000000000000000000000000 $ref"
667 done
668 LC_ALL=C join -v 2 .refs-before .refs-after |
669 while read ref new; do
670 echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
671 freshen_loose_objects "$new"
672 echo "0000000000000000000000000000000000000000 $new $ref"
673 done
674 git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
675 echo "done ref-changes %$proj% $proj"
676 } >.refs-temp 3>>"$logname"
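	# hand the collected ref-change notifications to taskd if it's listening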
677 if [ -S "$sockpath" ]; then
678 trap ':' PIPE
679 nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
680 trap - PIPE
682 bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
683 bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
684 --count=1 refs/heads >info/lastactivity"
685 ! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
686 rm -f .delaygc .allowgc
688 [ "${cfg_autogchack:-0}" != "0" ] &&
689 [ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
690 then
691 mv -f .refs-after .refs-last

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi
711 if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
712 cat "$bang_log" >.banglog
713 echo "" >>.banglog
714 echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
717 progress "- [$proj] update ($(date))"