# Merge branch 'master' into rorcz
# girocco.git / jobd/update.sh
# blob 8e765e01ea83cc6d3caffac41f008896d03d35d1
#!/bin/sh

# Load the Girocco configuration/utility library and the shared
# update/gc helper functions (@basedir@ is substituted at install time)
. @basedir@/shlib.sh
. @basedir@/jobd/updategc-util-functions.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
# git_fetch_q_progress "$@"
# Exec the external quiet-progress fetch helper with the proper environment:
# optionally wraps git in a file-size ulimit and puts the git exec path first
# in PATH.  Runs in a subshell so the environment changes do not leak.
git_fetch_q_progress() (
	_setexport_gitvars
	if [ "${cfg_max_file_size512:-0}" != "0" ]; then
		# enforce the configured maximum file size (in 512-byte blocks)
		GIT_BIN="'$cfg_basedir/bin/ulimit512' -i -f '$cfg_max_file_size512' -- '$cfg_git_bin'" &&
		export GIT_BIN
	fi
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH"
	export PATH
	exec @basedir@/jobd/git-fetch-q-progress.sh "$@"
)
# freshen_loose_objects full-sha ...
# if "$n" is a loose object, set its modification time to now
# otherwise silently do nothing with no error.  To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
		# split "aabbcc..." into shard "aa" and filename "bbcc..."
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		# $_list is deliberately unquoted -- it is a space-separated list
		# of paths built above; failures (non-loose objects) are ignored
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}
# darcs fast-export | git fast-import with error handling
# A plain pipeline only reports the last stage's exit status, so each
# stage writes its own status to fd 3 inside a command substitution whose
# output is fed back through a here-doc; the two reads recover both
# statuses.  Succeeds only if BOTH stages exited 0.
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
$(
	exec 4>&3 3>&1 1>&4 4>&-
	{
		_e1=0
		"$cfg_basedir"/bin/darcs-fast-export \
			--export-marks="$(pwd)/dfe-marks" \
			--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
		echo $_e1 >&3
	} |
	{
		_e2=0
		git_ulimit fast-import \
			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?
		echo $_e2 >&3
	}
)
EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
# bzr fast-export | git fast-import with error handling
# Same two-status pipeline technique as git_darcs_fetch: each stage echoes
# its exit status on fd 3 and both are read back through a here-doc so a
# failure in EITHER stage is detected, not just the last one.
git_bzr_fetch() (
	set_utf8_locale
	# silence bzr's log output
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
$(
	exec 4>&3 3>&1 1>&4 4>&-
	{
		_e1=0
		bzr fast-export --plain \
			--export-marks="$(pwd)/bfe-marks" \
			--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
		echo $_e1 >&3
	} |
	{
		_e2=0
		git_ulimit fast-import \
			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?
		echo $_e2 >&3
	}
)
EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)
# On return a "$lockf" will have been created that must be removed when gc is done
# (v_lock_file is provided by shlib.sh; on failure it leaves the reason
# in the named variable and we abort the whole update)
lock_update() {
	v_lock_file _lockresult "update.pid" || {
		echo >&2 "[$proj] $_lockresult"
		exit 1
	}
	lockf="$_lockresult"
}
# output all loose object ids, one per line, to stdout INCLUDING "/" shard separator
# look in "$1" (default "objects")
# $octet and $octet19 are glob patterns supplied by shlib.sh matching one
# hex octet and nineteen hex octets respectively
list_loose_sharded_objects() (
	cd "${1:-objects}" || return 1
	# expand the shard-directory glob; if nothing matched the glob comes
	# back literally and there are no loose objects at all
	objdirs="$(echo $octet)"
	[ "$objdirs" != "$octet" ] || return 0
	find -L $objdirs -mindepth 1 -maxdepth 1 -type f -name "$octet19*" -print
)
# Migrate any and all objects/packs/ref-changes from $incoming_fetch/$incoming_objs
# Does NOT do anything with reflogs, those should already be taken care of elsewhere
unquarantine_updates() {
	# just make sure everthing is copacetic first
	[ -n "$incoming_fetch" ] && [ -d "$incoming_fetch" ] &&
	[ -n "$incoming_objs" ] && [ -d "$incoming_objs" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}
	# the quarantine dir must really be the git dir it claims to be
	_ifd="$(git --git-dir="$incoming_fetch" rev-parse --git-dir 2>/dev/null)" || :
	[ -z "$_ifd" ] || _ifd="$(cd "$_ifd" && pwd -P)"
	[ -n "$_ifd" ] && [ "$_ifd" = "$incoming_fetch" ] || {
		echo >&2 "[$proj] unquarantine failed"
		exit 1
	}

	# both $incoming_fetch and . must have all their refs packed
	git --git-dir="$incoming_fetch" pack-refs --all --prune
	git --git-dir=. pack-refs --all --prune

	# now every loose object and pack must be migrated out of quarantine
	_objd="$(cd "$PWD/objects" && pwd -P)"
	# packs first
	if [ -d "$incoming_objs/pack" ]; then
		if [ ! -d "$_objd/pack" ]; then
			mkdir -p "$_objd/pack"
			chmod "$dperms" "$_objd/pack" >/dev/null 2>&1 || :
		fi
		while read -r _pckf && [ -n "${_pckf%.pack}" ]; do
			_pckf="${_pckf%.pack}"
			rename_pack "$_pckf" "$_objd/pack/${_pckf##*/}"
			chmod "$fperms" "$_objd/pack/${_pckf##*/}".?* >/dev/null 2>&1 || :
		done <<LIST_PACKS
$(list_packs --exclude-no-idx "$incoming_objs/pack")
LIST_PACKS
	fi
	# now loose objects
	# (use a subshell for loose objects as there could potentially be many
	# whereas there will normally be just one pack)
	list_loose_sharded_objects "$incoming_objs" |
	while read -r _objf && [ -n "$_objf" ] && [ "${#_objf}" -ge 41 ]; do
		if [ ! -d "$_objd/${_objf%%/*}" ]; then
			mkdir -p "$_objd/${_objf%%/*}"
			chmod "$dperms" "$_objd/${_objf%%/*}" >/dev/null 2>&1 || :
		fi
		# hard link if possible, fall back to copying, and tolerate the
		# object already being present in the destination
		ln "$incoming_objs/$_objf" "$_objd/$_objf" >/dev/null 2>&1 ||
		dupe_file "$incoming_objs/$_objf" "$_objd/$_objf" "tmp_obj_" >/dev/null 2>&1 ||
		[ -f "$_objd/$_objf" ] || {
			echo >&2 "[$proj] unable to unquarantine object $_objf"
			exit 1
		}
		chmod "$fperms" "$_objd/$_objf" >/dev/null 2>&1 || :
		rm -f "$incoming_objs/$_objf"
	done || exit 1

	# now the refs
	# simply replace the packed-refs file
	# but do it atomically and make sure it's on the same file system first
	rm -f "$PWD/packed-refs.$$"
	cat "$incoming_fetch/packed-refs" >"$PWD/packed-refs.$$"
	mv -f "$PWD/packed-refs.$$" "$PWD/packed-refs"
	rm -f "$PWD/packed-refs.$$"
}
# Create a "quarantine" area to fetch into
# This is set up similarly to the way the "repack" directory is set
# up for gc in that it's a subdirectory that's a whole "git" directory
# but it uses the existing objects directory as an alternate and its
# own objects subdirectory is a symlink to a subdirectory of the real
# objects directory (to guarantee that packs/objects can be moved rather
# than copied).  It starts out with a copy of all of the project's refs.
# A successful fetch will "unquarantine" fetched objects/packs + ref changes
create_quarantine() {
	incoming_objs="$(mktemp -d "$PWD/objects/incoming-XXXXXX")"
	incoming_objs="$(cd "$incoming_objs" && pwd -P)"
	chmod "$dperms" "$incoming_objs"
	mkdir "$incoming_objs/pack"
	mkdir "$incoming_objs/info"
	# the real objects directory serves as an alternate for the quarantine
	printf '%s\n' "$PWD/objects" >"$incoming_objs/info/alternates"
	incoming_fetch="$(mktemp -d "$PWD/incoming-XXXXXX")"
	incoming_fetch="$(cd "$incoming_fetch" && pwd -P)"
	chmod "$dperms" "$incoming_fetch"
	ln -s "$incoming_objs" "$incoming_fetch/objects"
	mkdir "$incoming_fetch/refs"
	ln -s "$PWD/config" "$incoming_fetch/config"
	git for-each-ref --format='%(objectname) %(refname)' >"$incoming_fetch/packed-refs"
	cat HEAD >"$incoming_fetch/HEAD"
	# Make sure the incoming packed-refs file is properly peeled
	git --git-dir="$incoming_fetch" pack-refs --all --prune
	# link to svn if it exists
	[ ! -d svn ] || ln -s "$PWD/svn" "$incoming_fetch/svn"
	use_quarantine=1
}
[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }

# Choose permissions: with hook-based control everything must stay
# world-writable, otherwise use group-shared permissions
if [ "$cfg_permission_control" != "Hooks" ]; then
	umask 002
	fperms=0664
	dperms=2775
else
	umask 000
	fperms=0666
	dperms=2777
fi
clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invokation therefore had a "bang" exit then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack build up.
check_and_set_needsgc

use_quarantine=
bang_log=
incoming_fetch=
incoming_objs=
lockf=
# Remove any quarantine/lock/bang leftovers on every exit path;
# complain first if we're exiting unsuccessfully
cleanup_exit() {
	ec=$?
	if [ $ec != 0 ]; then
		echo "update failed dir: $PWD" >&2
	fi
	[ -z "$incoming_fetch" ] || rm -rf "$incoming_fetch"
	[ -z "$incoming_objs" ] || rm -rf "$incoming_objs"
	[ -z "$bang_log" ] || rm -f "$bang_log"
	[ -z "$lockf" ] || rm -f "$lockf"
}
bang_exit() { cleanup_exit; }
trap 'cleanup_exit' EXIT
trap 'exit 129' HUP
trap 'exit 130' INT
trap 'exit 131' QUIT
trap 'exit 134' ABRT
trap 'exit 141' PIPE
trap 'exit 142' ALRM
trap 'exit 143' TERM

# Throttle: skip this update if the last one was too recent
if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"
	exit 0
fi
if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"
	exit 0
fi
lock_update
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# Remove any stale ref locks
clear_stale_ref_locks
# Remove any stale incoming-* object quarantine directories that are
# more than 12 hours old.  These are new with Git >= 2.11.0.
# But we also create our own during the fetch process as Git's quarantine
# only applies to incoming receive-pack which we imitate for our fetch.
find -L . objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
	-exec rm -rf '{}' + || :

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted, we do not even
# need to check how old they are.  A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

keep_bang_log=
do_check_after_refs=1
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}

bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
# reject empty URLs and URLs containing spaces or tabs
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
bang echo "Mirroring from URL \"$url\""
bang echo ""
statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
mailaddrs=
[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

# Snapshot the refs before fetching so changes can be detected afterwards
bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

# Compare the current refs against the .refs-before snapshot (at most once);
# sets $refschanged to non-empty if anything differs
check_after_refs() {
	[ -n "$do_check_after_refs" ] || return 0
	bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
	bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
	refschanged=
	cmp -s .refs-before .refs-after || refschanged=1
	do_check_after_refs=
}

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then
	rm -f .needspack
	>.needspack
	sleep 1
fi
372 case "$url" in
373 svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
374 [ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
375 # Allow the username to be specified in the "svn-credential.svn.username"
376 # property and the password in the "svn-credential.svn.password" property
377 # Use an 'anonsvn' username by default as is commonly used for anonymous svn
378 # Default the password to the same as the username
379 # The password property will be ignored unless a username has been specified
380 if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
381 if ! svnpass="$(git config --get svn-credential.svn.password)"; then
382 svnpass="$svnuser"
384 url1="${url#*://}"
385 url1="${url1%%/*}"
386 case "$url1" in ?*"@"?*)
387 urlsch="${url%%://*}"
388 url="$urlsch://${url#*@}"
389 esac
390 else
391 # As a fallback, check in the URL, just in case
392 url1="${url#*://}"
393 url1="${url1%%/*}"
394 svnuser=
395 case "$url1" in ?*"@"?*)
396 urlsch="${url%%://*}"
397 url="$urlsch://${url#*@}"
398 url1="${url1%%@*}"
399 svnuser="${url1%%:*}"
400 if [ -n "$svnuser" ]; then
401 svnpass="$svnuser"
402 case "$url1" in *":"*)
403 svnpass="${url1#*:}"
404 esac
406 esac
407 if [ -z "$svnuser" ]; then
408 svnuser="anonsvn"
409 svnpass="anonsvn"
412 GIT_ASKPASS_PASSWORD="$svnpass"
413 export GIT_ASKPASS_PASSWORD
414 # Update the git svn url to match baseurl but be cognizant of any
415 # needed prefix changes. See the comments in taskd/clone.sh about
416 # why we need to put up with a prefix in the first place.
417 case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
418 svnurl="${svnurl%/}"
419 svnurlold="$(config_get svnurl)" || :
420 if [ "$svnurl" != "$svnurlold" ]; then
421 # We better already have an svn-remote.svn.fetch setting
422 bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
423 # the only way to truly know what the proper prefix is
424 # is to attempt a fresh git-svn init -s on the new url
425 rm -rf svn-new-url || :
426 # We require svn info to succeed on the URL otherwise it's
427 # simply not a valid URL and without using -s on the init it
428 # will not otherwise be tested until the fetch
429 bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
430 bang mkdir svn-new-url
431 GIT_DIR=svn-new-url bang git init --bare --quiet
432 # We initially use -s for the init which will possibly shorten
433 # the URL. However, the shortening can fail if a password is
434 # not required for the longer version but is for the shorter,
435 # so try again without -s if the -s version fails.
436 cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" >/dev/null 2>&1 || '
437 cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile" >/dev/null 2>&1'
438 GIT_DIR=svn-new-url bang eval "$cmdstr"
439 gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
440 gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
441 gitsvnprefixnew="${gitsvnfetch%%:*}"
442 gitsvnsuffixnew="${gitsvnprefixnew##*/}"
443 gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
444 rm -rf svn-new-url || :
445 # Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
446 GIT_DIR=.
447 if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
448 # The url has been changed.
449 # We must update the url and replace the prefix on all config items
450 gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
451 gitsvnprefixold="${gitsvnfetch%%:*}"
452 gitsvnsuffixold="${gitsvnprefixold##*/}"
453 gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
454 git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
455 git config 'svn-remote.svnnew.url' "$gitsvnurl"
456 git config --get-regexp '^svn-remote\.svn\.' |
457 while read -r sname sval; do
458 case "$sname" in
459 svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
460 sname="${sname#svn-remote.svn.}"
461 sval="${sval#$gitsvnprefixold}"
462 bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
463 esac
464 done
465 test $? -eq 0
466 bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
467 bang git config --remove-section svn-remote.svn
468 bang git config --rename-section svn-remote.svnnew svn-remote.svn
470 bang config_set svnurl "$svnurl"
472 # remove any stale *.lock files greater than 1 hour old in case
473 # git-svn was killed on the last update because it took too long
474 find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
475 GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
476 export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
477 unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
478 saveconfig="$GIT_CONFIG_PARAMETERS"
479 git_add_config 'gc.auto=1'
480 git_add_config 'gc.autoPackLimit=1'
481 GIT_DIR=. bang git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile"
482 GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
483 export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
484 unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
485 unset GIT_CONFIG_PARAMETERS
486 [ -z "$saveconfig" ] || {
487 GIT_CONFIG_PARAMETERS="$saveconfig"
488 export GIT_CONFIG_PARAMETERS
490 # git svn does not preserve group permissions in the svn subdirectory
491 chmod -R ug+rw,o+r svn
492 # git svn also leaves behind ref turds that end with @nnn
493 # We get rid of them now
494 git for-each-ref --format='%(refname)' |
495 LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
496 git_updateref_stdin
497 # handle old-style svn setup if it exists
498 if [ -n "$(git config --get remote.origin.url || :)" ]; then
499 GIT_DIR=. bang git fetch
501 unset GIT_ASKPASS_PASSWORD
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	case "$url" in
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	esac
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
	bang git_darcs_fetch "$darcsurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
529 [ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
530 pruneopt=--prune
531 [ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
532 if ! is_gfi_mirror_url "$url"; then
533 lastwasclean=
534 [ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
535 nextisclean=
536 [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
537 if [ "$nextisclean" != "$lastwasclean" ]; then
538 if [ -n "$nextisclean" ]; then
539 git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
540 git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
541 git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
542 git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
543 git config --add remote.origin.fetch "+refs/replace/*:refs/replace/*"
544 else
545 git config --replace-all remote.origin.fetch "+refs/*:refs/*"
548 if ! create_quarantine; then
549 bang echo ""
550 bang echo "unable to create fetch quarantine area"
551 bang_cmd="create_quarantine"
552 bang_errcode=1
553 bang_failed
556 # remember the starting time so we can easily detect new packs for fast-import mirrors
557 # we sleep for 1 second after creating .gfipack to make sure all packs are newer
558 if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
559 rm -f .gfipack
560 >.gfipack
561 sleep 1
563 fetcharg="default"
564 git config remotes.default >/dev/null 2>&1 || fetcharg="--all"
565 GIT_DIR=.
566 [ -z "$use_quarantine" ] || GIT_DIR="$incoming_fetch"
567 fetchcmd="git_ulimit fetch"
568 [ "$show_progress" != "0" ] || fetchcmd="git_ulimit fetch -q"
569 if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
570 # git fetch learned --progress in v1.7.1
571 case "$show_progress" in
572 [2-9]*|1[0-9]*)
573 # full volume progress with all the spammy noise
574 fetchcmd="git_ulimit fetch --progress"
577 # a kinder, gentler progress that doesn't leave one
578 # covered all over in exploded bits of spam afterwards
579 fetchcmd="git_fetch_q_progress"
581 esac
583 # It's possible for a fetch to actually do something while still returning
584 # a non-zero result (perhaps some of the refs were updated but some were
585 # not -- a malicious Git-impersonation trying to set refs/heads/... refs
586 # to non-commit objects for example).
587 GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
588 unset GIT_SSL_NO_VERIFY
589 # If we did fetch anything, don't treat it as an error, but do keep the log;
590 # otherwise invoke bang_failed as for a normal failure
591 if [ "${bang_errcode:-0}" != "0" ]; then
592 save_bang_errcode="$bang_errcode"
593 check_after_refs
594 if [ -n "$refschanged" ]; then
595 keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
596 else
597 bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
598 bang_errcode="$save_bang_errcode"
599 bang_failed
602 if [ -n "$use_quarantine" ]; then
603 check_after_refs
604 unset GIT_DIR
605 if [ -n "$refschanged" ] && ! unquarantine_updates; then
606 bang echo ""
607 bang echo "unable to unquarantine fetched updates"
608 bang_cmd="unquarantine_updates"
609 bang_errcode=1
610 bang_failed
612 else
613 unset GIT_DIR
615 if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
616 if [ -n "$nextisclean" ]; then
617 # We must manually purge the unclean refs now as even prune won't do it
618 git for-each-ref --format='%(refname)' |
619 LC_ALL=C sed \
620 -e '/^refs\/heads\//d' \
621 -e '/^refs\/tags\//d' \
622 -e '/^refs\/notes\//d' \
623 -e '/^refs\/top-bases\//d' \
624 -e '/^refs\/replace\//d' \
625 -e 's/^/delete /' |
626 git_updateref_stdin
628 git config --bool girocco.lastupdateclean ${nextisclean:-0}
630 if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
631 find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs
632 rm -f .gfipack
634 # Check to see if we've lost our HEAD
635 if ! git rev-parse --verify HEAD >/dev/null 2>&1; then
636 git config --unset girocco.headok >/dev/null 2>&1 || :
637 # Try to get it back
638 check_and_set_head || :
641 esac
# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=2 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
check_after_refs

# Update server info if any refs changed (if they didn't packs shouldn't have either)
[ -z "$refschanged" ] || bang git update-server-info

# Pack all refs if any changed to keep things as efficient as possible
# Project mirror updates do not occur that often therefore this is a win
# However, if pack-refs fails for some reason, we can just ignore and continue
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
[ -z "$refschanged" ] || git pack-refs --all --prune || :

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	{
		echo "ref-changes %$proj% $proj"
		# changed refs (present in both snapshots with different values)
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		# deleted refs (only in the before snapshot)
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		# created refs (only in the after snapshot)
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
	if [ -S "$sockpath" ]; then
		trap ':' PIPE
		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
		trap - PIPE
	fi
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc
	if
		[ "${cfg_autogchack:-0}" != "0" ] &&
		[ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]
	then
		mv -f .refs-after .refs-last
	fi
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi

if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
	cat "$bang_log" >.banglog
	echo "" >>.banglog
	echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog
fi

progress "- [$proj] update ($(date))"