gc-git-svn: arrange for `git-svn fetch` to run gc-git-svn.sh
[girocco.git] / jobd / gc.sh
blob 51e47985e70fb0b48f2f5815310265aea0bf1f3f
1 #!/bin/sh
3 # NOTE: additional options can be passed to git repack by specifying
4 # them after the project name, for example:
5 # gc.sh my-project -f
7 . @basedir@/shlib.sh
9 set -e
11 if [ $# -lt 1 ]; then
12 echo "Usage: gc.sh projname [extra-repack-args]" >&2
13 exit 1
16 # Includes
17 _shlib_done=1
18 unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
19 . "$cfg_basedir/jobd/maintain-auto-gc-hack.sh"
20 . "$cfg_basedir/jobd/generate-auto-gc-update.sh"
21 GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1 && export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
23 # packing options
24 packopts="--depth=50 --window=50 --window-memory=${var_window_memory:-1g}"
25 quiet="-q"; [ "${show_progress:-0}" = "0" ] || quiet=
27 umask 002
28 [ "$cfg_permission_control" != "Hooks" ] || umask 000
29 clean_git_env
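# vcnt varname [items...] sets varname to the number of remaining arguments.
# Illustrative (hypothetical) usage: `vcnt n a b c` leaves n=3 and `vcnt n`
# leaves n=0; it is used below to count whitespace-separated pack name lists.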
31 vcnt() {
32 eval "$1="'$(( $# - 1 ))'
35 pidactive() {
36 if _result="$(kill -0 "$1" 2>&1)"; then
37 # process exists and we have permission to signal it
38 return 0
40 case "$_result" in *"not permitted"*)
41 # we do not have permission to signal the process
42 return 0
43 esac
44 # process does not exist
45 return 1
48 createlock() {
49 # A .lock file should only exist for much less than a second.
50 # If we see a stale lock file (> 1h old), remove it and then,
51 # just in case, wait 30 seconds for any process whose .lock
52 # we might have just removed (it's racy) to finish doing what
53 # should take much less than a second to do.
54 _stalelock="$(find -L "$1.lock" -maxdepth 1 -mmin +60 -print 2>/dev/null)" || :
55 if [ -n "$_stalelock" ]; then
56 rm -f "$_stalelock"
57 sleep 30
59 for _try in p p n; do
60 if (set -C; >"$1.lock") 2>/dev/null; then
61 echo "$1.lock"
62 return 0
64 # delay and try again
65 [ "$_try" != "p" ] || sleep 1
66 done
67 # cannot create lock file
68 return 1
71 # The pre-receive script creates one ref log file per push but we want them to
72 # be coalesced into one ref log file per day. We are guaranteed that any files
73 # we find to coalesce are NOT currently being written to since they are always
74 # written first as temporary files and then moved into place. We attempt to
75 # transfer the most recent modification time to the coalesced log file which
76 # would step on its mod time if it were being written to directly, but if we
77 # find per-process ref log files then it must be a push project and the only
78 # thing that would write directly to the main per-day log file would be a
79 # mirror project so there's actually no conflict.
80 # Also, if the clock is wonky (or was futzed with) we may have both YYYYMMDD
81 # and YYYYMMDD.gz present in which case combine them into YYYYMMDD
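# Illustrative (hypothetical dates) walk-through of the reconciliation below:
# given both reflogs/20240102 and reflogs/20240102.gz, the uncompressed file is
# first renamed to reflogs/20240102_, the .gz is gunzipped back to 20240102,
# the saved 20240102_ contents are appended to it, the timestamp is transferred
# and 20240102_ is removed, leaving a single uncompressed 20240102 file.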
82 coalesce_reflogs() {
83 [ -d reflogs ] || return 0
84 rm -f .gc_failed
85 find -L reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" -print |
86 while read -r rname; do
87 if [ -e "$rname.gz" ]; then
88 if [ -s "$rname" ]; then
89 # Presumably the .gz file must have been created before the non-gz
90 # file since it had to be uncompressed at some point therefore
91 # we need to append the non-gz contents to it but keep the non-gz
92 # contents timestamp so we rename to YYYYMMDD_ which will sort first
93 # and be picked up in the next step if we are interrupted in the middle.
94 # If a YYYYMMDD_ file already exists we append to it and transfer the
95 # timestamp. Finally we transfer the YYYYMMDD_ timestamp to the result
96 # and remove the YYYYMMDD_ temporary file leaving the result uncompressed.
97 if [ -e "${rname}_" ]; then
98 cat "$rname" >>"${rname}_"
99 touch -r "$rname" "${rname}_"
100 rm -f "$rname"
101 ! [ -e "$rname" ]
102 else
103 mv "$rname" "${rname}_"
105 gzip -d "$rname.gz" </dev/null
106 [ -e "$rname" ] && ! [ -e "$rname.gz" ]
107 cat "${rname}_" >>"$rname"
108 touch -r "${rname}_" "$rname"
109 rm -f "${rname}_"
110 else
111 # Just remove the empty file to resolve the problem
112 rm -f "$rname"
115 done
116 find -L reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]_*" -print | LC_ALL=C sort |
117 while read -r rname; do
118 logname="${rname%%_*}"
119 # If someone's been futzing with the date, the file we want to
120 # append to could already have been compressed, so we just uncompress
121 # it here. The previous block guarantees we do not have both a compressed
122 # and uncompressed version present at the same time.
123 if [ -e "$logname.gz" ]; then
124 gzip -d "$logname.gz" </dev/null
125 [ -e "$logname" ] && ! [ -e "$logname.gz" ]
127 cat "$rname" >>"$logname"
128 touch -r "$rname" "$logname"
129 rm -f "$rname"
130 if [ -e "$rname" ]; then
131 >.gc_failed
132 echo "! [$proj] failed to remove $rname" >&2
133 exit 1 # will only exit subshell created by "|"
135 done
136 ! [ -e .gc_failed ]
139 # Remove any files in reflogs that are older than $cfg_reflogs_lifetime days
140 prune_reflogs() {
141 [ -d reflogs ] || return 0
142 exp="$(( ${cfg_reflogs_lifetime:-1} * 1440 ))"
143 [ $exp -gt 0 ] || exp=1440
144 [ $exp -le 43200 ] || exp=43200
145 find -L reflogs -maxdepth 1 -type f -mmin "+$exp" -exec rm -f '{}' + || :
148 # Compact any reflogs that are not today's UTC date unless a .gz version exists
149 compact_reflogs() {
150 [ -d reflogs ] || return 0
151 _td="reflogs/$(TZ=UTC date '+%Y%m%d')"
152 find -L reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" -print |
153 while read -r rname; do
154 [ "$rname" != "$_td" ] || continue
155 ! [ -e "$rname.gz" ] || continue
156 gzip -9 "$rname" </dev/null
157 done
160 # return true if there's more than one objects/pack-<sha>.pack file or
161 # ANY sha-1 files in objects or
162 # there's one pack and it's not a normal pack name or
163 # there's one pack but not any refs
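# Illustrative (hypothetical names) examples: two pack files such as
# pack-aaaa.pack plus pack-bbbb.pack, any loose object file, or a single pack
# with an abnormal name like mypack.pack all make the repository "dirty";
# a lone pack-<40+ hex digit>.pack together with non-empty refs does not.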
164 is_dirty() {
165 _packs="$(find -L objects/pack -name "pre-auto-gc-[12].pack" -prune -o -name "*.pack" -type f -print 2>/dev/null | head -n 2)"
166 vcnt _packscnt $_packs
167 if [ $_packscnt -gt 1 ]; then
168 return 0
170 if [ $_packscnt -eq 1 ]; then
171 # the single pack name is in $_packs
172 _packs="${_packs%.pack}"
173 _packs="${_packs#objects/pack/}"
174 case "$_packs" in
175 pack-*)
176 _packs="${_packs#pack-}"
177 if [ "${#_packs}" -lt 40 ] || [ "${_packs#*[!0-9a-fA-F]}" != "$_packs" ]; then
178 # a name that is not exclusively 40 or more hexadecimal digits makes it dirty
179 return 0
183 # abnormal name makes it dirty
184 return 0
186 esac
188 _objs=$(find -L objects/$octet -name "$octet19*" -type f -print 2>/dev/null | head -n 1 | LC_ALL=C wc -l)
189 [ $_objs -eq 0 ] || return 0
190 [ $_packscnt -eq 1 ] || return 1
191 # we do this check last because it's potentially the most expensive;
192 # at this point we know we do not have any loose objects, but we do
193 # have one pack that's named "normally"; empty refs => dirty
194 is_empty_refs_dir
197 # make sure combine-packs uses the correct Git executable
198 run_combine_packs() {
199 PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/combine-packs.sh "$@"
202 # duplicate the first file to the name given by the second file making sure that
203 # the second file appears atomically all-at-once after the copy has been completed
204 # and does not appear at all if the copy fails (in which case this function fails)
205 # if the second file already exists this function fails with status 1
206 # if the file names are the same this function returns immediately with success
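# Hypothetical usage example: `dupe_file objects/pack/old.idx objects/pack/new.idx`
# either makes new.idx appear atomically or fails without leaving a partial copy.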
207 dupe_file() {
208 [ "$1" != "$2" ] || return 0
209 ! [ -e "$2" ] || return 1
210 case "$2" in
211 *?/?*) _tmpdir="${2%/*}";;
212 *) _tmpdir=".";;
213 esac
214 _tmpfile="$(mktemp "${_tmpdir:-.}/packtmp-XXXXXX")" || return 1
215 cp -fp "$1" "$_tmpfile" || return 1
216 mv -f "$_tmpfile" "$2"
219 # rename_pack oldnamepath newnamepath
220 # note that .keep and .bndl files are left untouched and not moved at all!
221 rename_pack() {
222 [ $# -eq 2 ] && [ "$1" != "$2" ] || {
223 echo >&2 "[$proj] incorrect use of rename_pack function"
224 exit 1
226 # Git assumes that if the destination of the rename already exists
227 # that it is, in fact, a copy of the same bytes so silently succeeds
228 # without doing anything. We duplicate that logic here.
229 # Git checks for the .idx file first before even trying to use a pack
230 # so it should be the last moved and the first removed.
231 for ext in pack bitmap idx; do
232 [ -f "$1.$ext" ] || continue
233 ln "$1.$ext" "$2.$ext" >/dev/null 2>&1 ||
234 dupe_file "$1.$ext" "$2.$ext" >/dev/null 2>&1 ||
235 [ -f "$2.$ext" ] || {
236 echo >&2 "[$proj] unable to move $1.$ext to $2.$ext"
237 exit 1
239 done
240 for ext in idx pack bitmap; do
241 rm -f "$1.$ext"
242 done
243 return 0
246 # combine the input pack(s) into a new pack (or possibly packs if packSizeLimit set)
247 # input pack names are read from standard input one per line delimited by the first
248 # ':', ' ' or '\n' character on the line (which allows gfi-packs to be read directly)
249 # all arguments, if any, are passed to pack-objects as additional options
250 # returns non-zero on failure AND creates .gc_failed in that case
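# For illustration only (hypothetical line): an input line such as
# "pack-0123abcd.pack: 42" is cut at the first ':' or ' ' so that only the
# leading "pack-0123abcd.pack" name is taken as a pack to combine.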
251 combine_packs() {
252 rm -f .gc_failed
253 find -L objects/pack -maxdepth 1 -type f -name '*.zap*' -exec rm -f '{}' + || :
254 run_combine_packs --replace "$@" $packopts --all-progress-implied $quiet --non-empty || {
255 >.gc_failed
256 return 1
258 return 0
261 # if the current directory is_gfi_mirror then repack all packs listed in gfi-packs
262 repack_gfi_packs() {
263 [ -n "$gfi_mirror" ] || return 0
264 [ -d objects/pack ] || { rm -f gfi-packs; return 0; }
265 progress "~ [$proj] redeltifying poor quality git fast-import packs"
266 combine_packs --ignore-missing --no-reuse-delta <gfi-packs
267 rm -f gfi-packs
268 return 0
271 # see if there are "lotsa" loose objects
272 # "lotsa" is defined as the 17, 68, 71 and 86 object directories existing
273 # and there being at least 5 total objects between them which corresponds
274 # to an approximate average of 320 loose objects before this function starts
275 # returning true and triggering a "mini" gc to pack up loose objects
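# (Reasoning: 5 objects found in 4 of the 256 object fan-out directories
# extrapolates to roughly 5 * 256 / 4 = 320 loose objects repository-wide.)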
276 lotsa_loose_objects() {
277 [ -d objects/17 ] && [ -d objects/68 ] && [ -d objects/71 ] && [ -d objects/86 ] || return 1
278 _objs=$(( $(find -L objects/17 objects/68 objects/71 objects/86 -maxdepth 1 -name "$octet19*" -type f -print 2>/dev/null | LC_ALL=C wc -l) ))
279 [ ${_objs:-0} -ge 5 ]
282 # pack any existing loose objects into a new _l.pack file then run prune-packed
283 # note that prune-packed is NOT run beforehand -- the caller must do that if needed
284 # loose objects need not be part of complete commits/trees as --weak-naming is used
285 pack_loose_objects() {
286 _lpacks="$(run_combine_packs </dev/null --names --loose --weak-naming --non-empty --all-progress-implied ${quiet:---progress} $packopts)"
287 if [ -n "$_lpacks" ]; then
288 # We need to identify these packs later so we don't combine_packs them
289 for _objpack in $_lpacks; do
290 rename_pack "objects/pack/pack-$_objpack" "objects/pack/pack-${_objpack}_l" || :
291 done
292 git prune-packed $quiet
296 # combine small packs into larger pack(s)
297 # we avoid any _[lo], keep, bndl or bitmap packs
298 # if the optional argument is non-empty even a single small pack will be redeltad
299 combine_small_packs() {
300 _didprogress=
301 _minsmallpacks=2
302 if [ -n "$1" ] && [ -n "$noreusedeltaopt" ]; then
303 _minsmallpacks=1
305 _lpo="--exclude-no-idx --exclude-keep --exclude-bitmap --exclude-bndl"
306 _lpo="$_lpo --exclude-sfx _u --exclude-sfx _o --exclude-sfx _l"
307 _lpo="$_lpo --quiet --object-limit $var_redelta_threshold objects/pack"
308 while
309 _cnt="$(list_packs --count $_lpo)" || :
310 test "${_cnt:-0}" -ge $_minsmallpacks
312 [ -n "$_didprogress" ] || {
313 progress "~ [$proj] combining small packs into a single larger pack"
314 _didprogress=1
316 _newp="$(list_packs $_lpo | combine_packs --names $noreusedeltaopt)"
317 vcnt _newc $_newp
318 # be paranoid and exit the loop if we haven't reduced the number of packs
319 [ $_newc -lt $_cnt ] || break
320 _minsmallpacks=2
321 done
322 return 0
325 # combine small _l packs into larger pack(s) using --weak-naming
326 # we avoid any non _l, keep, bndl or bitmap packs
327 # if the optional 2nd argument is non-empty even a single small pack will be redeltad
328 combine_small_loose_packs() {
329 _didprogress=
330 _minsmallpacks=2
331 if [ -n "$1" ] && [ -n "$noreusedeltaopt" ]; then
332 _minsmallpacks=1
334 _lpo="--exclude-no-idx --exclude-keep --exclude-bitmap --exclude-bndl"
335 _lpo="$_lpo --exclude-no-sfx _l"
336 _lpo="$_lpo --quiet --object-limit $var_redelta_threshold objects/pack"
337 while
338 _cnt="$(list_packs --count $_lpo)" || :
339 test "${_cnt:-0}" -ge $_minsmallpacks
341 [ -n "$_didprogress" ] || {
342 progress "~ [$proj] combining small loose packs into a single larger pack"
343 _didprogress=1
345 _newp="$(list_packs $_lpo | combine_packs --names --weak-naming $noreusedeltaopt)"
346 # We need to identify these packs later so we don't combine_packs them
347 for _objpack in $_newp; do
348 rename_pack "objects/pack/pack-$_objpack" "objects/pack/pack-${_objpack}_l" || :
349 done
350 vcnt _newc $_newp
351 # be paranoid and exit the loop if we haven't reduced the number of packs
352 [ $_newc -lt $_cnt ] || break
353 _minsmallpacks=2
354 done
355 return 0
358 # Unfortunately, some fetch strategies (e.g. git-svn and non-smart HTTP) lack
359 # the ability to store newly fetched objects in a pack.
360 # However, the fetch code conveniently sets .needspack just before it fetches
361 # so that it's easy to find all the loose objects that have been fetched and
362 # combine them into a pack. The --no-reuse-delta option is meaningless here
363 # since everything to be packed is a loose object and therefore not a delta so
364 # deltification will always take place.
365 make_needs_pack() {
366 [ -f .needspack ] || return 0
367 rm -f .needspackgc
368 mv -f .needspack .needspackgc
369 progress "~ [$proj] combining fetched loose objects into a pack"
370 _newp="$(find -L objects/$octet -maxdepth 1 -type f -newer .needspackgc -name "$octet19*" -print 2>/dev/null |
371 LC_ALL=C awk -F / '{print $2 $3}' |
372 run_combine_packs --objects --names $packopts --incremental --all-progress-implied $quiet --non-empty)" || {
373 # We used to fail gc here.
374 # Now, however, we just ignore the failure because we have
375 # another mechanism to handle loose objects and it's possible
376 # that the fetcher somehow brought in unconnected objects which
377 # would cause the above combine-packs to fail.
378 # By ignoring the failure and just removing the .needspack file
379 # the loose objects will be treated as "ordinary" loose objects
380 # and packed using the "--weak-naming" option which can handle
381 # broken connectivity.
382 # That's a better solution than just failing here or leaving
383 # .needspack behind to potentially continue to fail again and
384 # again.
385 _newp=
387 if [ -n "$_newp" ]; then
388 # remove the now-redundant loose objects -- this is always safe
389 # even during a concurrent push because a reprepare_packed_git
390 # will be triggered if an object that should be there is not
391 # found thereby finding it in the new pack instead
392 git prune-packed $quiet
394 rm -f .needspackgc
397 # HEADSHA="$(pack_is_complete /full/path/to/some.pack /full/path/to/packed-refs "$(cat HEAD)")"
398 pack_is_complete() {
399 # Must have a matching .idx file and a non-empty packed-refs file
400 [ -s "${1%.pack}.idx" ] || return 1
401 [ -s "$2" ] || return 1
402 _headsha=
403 case "$3" in
404 $octet20*)
405 _headsha="$3"
407 "ref: refs/"?*|"ref:refs/"?*|"refs/"?*)
408 _headmatch="${3#ref:}"
409 _headmatch="${_headmatch# }"
410 _headmatchpat="$(echo "$_headmatch" | LC_ALL=C sed -e 's/\([.$]\)/\\\1/g')"
411 _headsha="$(LC_ALL=C grep -e "^$octet20$hexdig* $_headmatchpat\$" <"$2" |
412 LC_ALL=C cut -d ' ' -f 1)"
413 case "$_headsha" in $octet20*) :;; *)
414 return 1
415 esac
418 # bad HEAD
419 return 1
420 esac
421 rm -rf pack_is_complete_test
422 mkdir pack_is_complete_test
423 mkdir pack_is_complete_test/refs
424 mkdir pack_is_complete_test/objects
425 mkdir pack_is_complete_test/objects/pack
426 echo "$_headsha" >pack_is_complete_test/HEAD
427 ln -s "$1" pack_is_complete_test/objects/pack/
428 ln -s "${1%.pack}.idx" pack_is_complete_test/objects/pack/
429 ln -s "$2" pack_is_complete_test/packed-refs
430 _count="$(git --git-dir=pack_is_complete_test rev-list --count --all 2>/dev/null)" || :
431 rm -rf pack_is_complete_test
432 [ -n "$_count" ] || return 1
433 [ "$_count" -gt 0 ] 2>/dev/null || return 1
434 echo "$_headsha"
437 # On return a "$lockf" will have been created that must be removed when gc is done
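# The lock file written below holds the owner's pid and hostname, for example
# (hypothetical values) "12345 gchost.example.com" with no trailing newline.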
438 lock_gc() {
439 # be compatible with gc.pid file from newer Git releases
440 lockf=gc.pid
441 hn="$(hostname)"
442 active=
443 if [ "$(createlock "$lockf")" ]; then
444 # If $lockf is:
445 # 1) less than 12 hours old
446 # 2) contains two fields (pid hostname) NO trailing NL
447 # 3) the hostname is different OR the pid is still alive
448 # then we exit as another active process is holding the lock
449 if [ "$(find -L "$lockf" -maxdepth 1 -mmin -720 -print 2>/dev/null)" ]; then
450 apid=
451 ahost=
452 read -r apid ahost ajunk <"$lockf" || :
453 if [ "$apid" ] && [ "$ahost" ]; then
454 if [ "$ahost" != "$hn" ] || pidactive "$apid"; then
455 active=1
459 else
460 echo >&2 "[$proj] unable to create gc.pid.lock file"
461 exit 1
463 if [ -n "$active" ]; then
464 rm -f "$lockf.lock"
465 echo >&2 "[$proj] gc already running on machine '$ahost' pid '$apid'"
466 exit 1
468 printf "%s %s" "$$" "$hn" >"$lockf.lock"
469 chmod 0664 "$lockf.lock"
470 mv -f "$lockf.lock" "$lockf"
473 # Create a repack subdirectory such that running repack in it will pack the
474 # same things that a pack in the normal directory would except that the pack
475 # is guaranteed to be generated in an optimized order by adding a suitable
476 # synthesized ref in the refs/tags namespace (yes, pack-objects.c really does
477 # behave differently depending on the contents of the refs/tags namespace).
478 # Before calling this, pack-refs --all MUST be performed or the wrong pack
479 # will end up being made.
480 #
481 # If a ref deletion is pushed after making the repack subdir but before the
482 # actual repack, the discarded objects will be packed -- no big deal,
483 # they'll get discarded the next time gc runs.
484 #
485 # If a fast-forward ref update is pushed after making the repack subdir but
486 # before the actual repack, it will be picked up and the new objects packed
487 # (subject to the normal git repack race about picking such updates up).
488 #
489 # If a non-fast-forward ref update is pushed after making the repack subdir but
490 # before the actual repack, it will be picked up like a fast-forward update but
491 # the discarded objects will be included like a ref deletion (until the next
492 # scheduled gc takes place).
493 #
494 # We retain a copy of the original packed-refs file as repack/packed-refs.orig
495 # If ref deletions come in while we're repacking, the original packed-refs
496 # file will be modified, but we'll still pack the deleted ref(s).
497 # If the packed-refs.orig file is used to create the bundle header we avoid
498 # a situation where the bundle contains a ref state that never actually
499 # existed in reality (for example a new branch is pushed and then an old
500 # branch deleted afterwards -- the deletion would show up in the bundle
501 # because it will cause the original packed-refs file to be re-written, but
502 # the new branch creation will not unless we do another pack-refs which might
503 # lead to having an incomplete bundle). Therefore we want to keep a copy of
504 # the original packed-refs file around. We do the same thing for HEAD.
505 #
506 # It's possible that the "objects" subdirectory is a symbolic link.
507 # Git does support this. However, during the repacking process, new packs
508 # will be created in repack/alt/pack and then moved into objects/pack.
509 # In order for this to work seamlessly, they must both be on the same
510 # filesystem. But when objects (or even objects/pack) is a symbolic link they
511 # might not be. For this reason a "repack" subdirectory is created under
512 # objects/pack and the repack/alt/pack directory symbolically linked to it.
513 #
514 # Git allows not just HEAD to be a symbolic-ref, but any ref anywhere in the
515 # refs namespace. We are concerned about ref name collisions and getting the
516 # right tag set to get an optimal pack. We can safely duplicate the ref space
517 # under refs/heads, refs/notes and refs/remotes without any risk of unwanted
518 # collisions and this will likely make over 99% of all symbolic refs found
519 # in the wild work properly. Girocco itself never creates any symbolic refs
520 # inside the refs namespace; this is a nod to simultaneously using a Girocco
521 # repository for other purposes.
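# A hypothetical sketch of the synthesized repack/packed-refs produced below for
# a repository whose packed-refs lists refs/heads/master and refs/tags/v1 and
# whose HEAD resolves to commit <H>:
#   <master-hash> refs/!/heads/master
#   <v1-hash> refs/!/tags/v1
#   <H> refs/!=/HEAD
#   <H> refs/heads/!
#   <master-hash> refs/heads/master   (re-added by the refs/heads filter pass)
#   <opt-hash> refs/tags/!            (<opt-hash> is from "git rev-list -n 1 --all")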
522 make_repack_dir() {
523 ! [ -d repack ] || rm -rf repack
524 ! [ -d repack ] || { echo >&2 "[$proj] cannot remove repack subdirectory"; exit 1; }
525 [ -d objects/pack ] || mkdir -p objects/pack
526 ! [ -d objects/pack/repack ] || rm -rf objects/pack/repack
527 ! [ -d objects/pack/repack ] || { echo >&2 "[$proj] cannot remove objects/pack/repack subdirectory"; exit 1; }
528 mkdir repack repack/refs repack/alt objects/pack/repack
529 [ -d info ] || mkdir info
530 ln -s ../config repack/config
531 ln -s ../info repack/info
532 ln -s ../objects repack/objects
533 ln -s "$PWD/objects/pack/repack" repack/alt/pack
534 ln -s ../../refs repack/refs/refs
535 ! [ -d logs ] || ln -s ../logs repack/logs
536 ! [ -d worktrees ] || ln -s ../worktrees repack/worktrees
537 _lines=$(( $(LC_ALL=C wc -l <packed-refs) ))
538 cat HEAD >repack/HEAD.orig
539 >repack/packed-refs.extra
540 _xtralines=0
541 cat packed-refs >repack/packed-refs.orig
542 if [ $(LC_ALL=C wc -l <repack/packed-refs.orig) -ne "$_lines" ]; then
543 echo >&2 "[$proj] error: make_repack_dir failed original packed-refs line count sanity check"
544 exit 1
546 if [ "${cfg_fetch_stash_refs:-0}" = "0" ]; then
547 # migrate any refs/stash or refs/tgstash lines to repack/packed-refs.extra
548 <repack/packed-refs.orig LC_ALL=C awk -v xtra="repack/packed-refs.extra" '
549 BEGIN { peeling = 0 }
550 NR == 1 && /^#/ { print; next; }
551 peeling && /^\^/ { print >>xtra; next; }
552 /^[0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f]+ refs\/(stash|tgstash)(\/|$)/ {
553 peeling = 1
554 print >>xtra
555 next
557 { peeling = 0; print; }
558 ' >repack/packed-refs.new
559 _xtralines="$(( $(LC_ALL=C wc -l <repack/packed-refs.extra) + 0 ))"
560 _newlines="$(( $(LC_ALL=C wc -l <repack/packed-refs.new) + 0 ))"
561 if [ "$(( $_newlines + $_xtralines ))" -ne "$_lines" ]; then
562 echo >&2 "[$proj] error: make_repack_dir failed packed-refs.extra line count sanity check"
563 exit 1
565 mv -f repack/packed-refs.new repack/packed-refs.orig
566 _lines="$_newlines"
568 # Note: Git v1.5.0 introduced the "# pack-refs with:" header line for the packed-refs file
569 sed '/^# pack-refs/d; s, refs/, refs/!/,' <repack/packed-refs.orig >repack/packed-refs
570 nohead=
571 headref="$(git rev-parse --verify --quiet HEAD)" || :
572 if [ -n "$headref" ]; then
573 echo "$headref refs/!=/HEAD" >>repack/packed-refs
574 echo "$headref refs/heads/!" >>repack/packed-refs
575 nohead='\, refs/heads/!$,d; '
576 _lines=$(( $_lines + 2 ))
578 if [ $(( $(LC_ALL=C wc -l <repack/packed-refs) + 1 )) -ne "$_lines" ]; then
579 echo >&2 "[$proj] error: make_repack_dir failed packed-refs initial line count sanity check"
580 exit 1
582 sed -n "$nohead"'\, refs/heads/,p; \, refs/notes/,p; \, refs/remotes/,p' <repack/packed-refs.orig >>repack/packed-refs
583 _newlines="$(( $(LC_ALL=C wc -l <repack/packed-refs) ))"
584 if [ $(( $_newlines + 1 )) -lt "$_lines" ]; then
585 echo >&2 "[$proj] error: make_repack_dir failed packed-refs extra line count sanity check"
586 exit 1
588 _lines="$_newlines"
589 optref="$(git rev-list -n 1 --all 2>/dev/null)" || :
590 if [ -n "$optref" ]; then
591 echo "$optref refs/tags/!" >>repack/packed-refs
592 _lines=$(( $_lines + 1 ))
593 echo "$optref" >repack/HEAD
594 else
595 cat HEAD >repack/HEAD
597 if [ $(LC_ALL=C wc -l <repack/packed-refs) -ne "$_lines" ]; then
598 echo >&2 "[$proj] error: make_repack_dir failed packed-refs line count sanity check"
599 exit 1
603 # Remove any crud that's been left behind by interrupted operations
604 # that did not clean up after themselves
605 remove_crud() {
606 # Remove any existing FETCH_HEAD
607 # There can only be a FETCH_HEAD if we've been fetching, not if we've been
608 # receiving pushes (those never create a FETCH_HEAD).
609 # And if we're fetching because we're a mirror, we know we're not fetching right
610 # now since jobd.pl never runs a project's fetch simultaneously with its gc.
611 # Therefore any existing FETCH_HEAD is junk. And it may be many megabytes if
612 # there were a lot of refs.
613 rm -f FETCH_HEAD
615 # remove any existing pack_is_complete_test or repack subdirectories
616 # If either exists when this function is called it's crud
617 rm -rf pack_is_complete_test repack objects/pack/repack
619 # Remove any stale pack remnants that are more than an hour old.
620 # Stale pack fragments are defined as any pack-<sha1>.ext where .ext is NOT
621 # .pack AND the corresponding .pack DOES NOT exist. A bunch of stale
622 # pack-<sha1>.idx files without their corresponding .pack files are worthless
623 # and just waste space. Normally there shouldn't be any remnants but actually
624 # this can happen when things are interrupted at just the wrong time.
625 # Note that the objects/pack directory is created by git init and should
626 # always exist.
627 find -L objects/pack -maxdepth 1 -type f -mmin +60 -name "pack-$octet20*.?*" -print |
628 LC_ALL=C sed -e 's/^objects\/pack\/pack-//; s/\..*$//' | LC_ALL=C sort -u |
629 while read packsha; do
630 ! [ -e "objects/pack/pack-$packsha.pack" ] || continue
631 rm -f "objects/pack/pack-$packsha".?*
632 done
634 # Remove any stale tmp reflogs files that are more than one hour old.
635 # Since they are created only while the pre-receive hook is running and
636 # all it does is process a bunch of refs passed to it on standard input
637 # it's inconceivable that it would ever take as much as an hour to run.
638 if [ -d reflogs ]; then
639 find -L reflogs -maxdepth 1 -type f -mmin +60 -name "tmp_*" -exec rm -f '{}' + || :
642 # Remove any stale object tmp_obj_* files that are more than 3 hours old.
643 # Really these files should only exist very briefly so there shouldn't be any
644 # but things happen that can end up leaving them behind.
645 find -L objects/$octet -maxdepth 1 -type f -mmin +180 -name "tmp_obj_?*" -exec rm -f '{}' + 2>/dev/null || :
647 # Remove any stale pack .keep files that are more than 12 hours old.
648 # We don't do anything to create any permanent pack .keep files, so they must
649 # be remnants from some failed push or something. Removing the .keep will
650 # allow the pack to be properly repacked.
651 find -L objects/pack -maxdepth 1 -type f -mmin +720 -name "pack-$octet20*.keep" -exec rm -f '{}' + || :
653 # Remove any stale tmp_pack_*, tmp_idx_*, tmp_bitmap_*, packtmp-* or .tmp-*-pack* files
654 # that are more than 12 hours old.
655 find -L objects/pack -maxdepth 1 -type f -mmin +720 \( \
656 -name "tmp_pack_?*" -o -name "tmp_idx_?*" -o -name "tmp_bitmap_?*" -o \
657 -name "packtmp-?*" -o -name ".tmp-?*-pack*" \
658 \) -exec rm -f '{}' + || :
660 # Remove any stale incoming-* object quarantine directories that are
661 # more than 12 hours old. These are new with Git >= 2.11.0.
662 find -L objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
663 -exec rm -rf '{}' + || :
665 # Remove any stale shallow_* files that are more than 12 hours old.
666 # These can be left behind by Git >= 1.8.4.2 and < 2.0.0 when a client
667 # requests a shallow clone. Also discard stale .refs-temp* and
668 # .refs-new* files at the same time.
669 find -L . -maxdepth 1 -type f -mmin +720 \( \
670 -name "shallow_?*" -o -name ".refs-temp*" -o -name ".refs-new*" \
671 \) -exec rm -f '{}' + || :
673 # Remove any stale cmbnpcks-* dirs that are more than 12 hours old.
674 # These can be left behind by abnormal exits (e.g. power failure).
675 find -L . -maxdepth 1 -type d -mmin +720 -name "cmbnpcks-?*" \
676 -exec rm -rf '{}' + || :
678 # Remove any stale *.temp files in the objects area that are more than 12 hours old.
679 # This can be stale sha1.temp, or stale *.pack.temp so we kill all stale *.temp.
680 find -L objects -type f -mmin +720 -name "*.temp" -exec rm -f '{}' + || :
682 # Remove any stale *.lock files in the htmlcache area that might have been left
683 # behind after an abnormal exit during an attempt to update a cached file and
684 # are more than 1 hour old.
685 ! [ -d htmlcache ] || find -L htmlcache -type f -mmin +60 -name "*.lock" -exec rm -f '{}' + || :
687 # Remove any stale git-svn temp files that are more than 12 hours old.
688 # The git-svn process creates temp files with random 10 character names
689 # in the root of $GIT_DIR. Unfortunately they do not have a recognizable
690 # prefix, so we just have to kill any files with a 10-character name. We
691 # do this only for git-svn mirrors. All characters are chosen from
692 # [A-Za-z0-9_] so we can at least check that and fortunately the only
693 # collision is 'FETCH_HEAD' but that shouldn't matter.
694 # There may also be temp files with a Git_ prefix as well.
695 if [ -n "$svn_mirror" ]; then
696 _randchar='[A-Za-z0-9_]'
697 _randchar2="$_randchar$_randchar"
698 _randchar4="$_randchar2$_randchar2"
699 _randchar10="$_randchar4$_randchar4$_randchar2"
700 find -L . -maxdepth 1 -type f -mmin +720 -name "$_randchar10" -exec rm -f '{}' + || :
701 find -L . -maxdepth 1 -type f -mmin +720 -name "Git_*" -exec rm -f '{}' + || :
704 # Remove any stale fast_import_crash_<pid> files that are more than 3 days old.
705 if [ -n "$gfi_mirror" ]; then
706 find -L . -maxdepth 1 -type f -mmin +4320 -name "fast_import_crash_?*" -exec rm -f '{}' + || :
709 # Remove any stale core or *.core or core.* files that are more than 3 days old.
710 find -L . -maxdepth 1 -type f -mmin +4320 \( -name "core" -o -name "*.core" -o -name "core.*" \) \
711 -exec rm -f '{}' + || :
715 ## Garbage Collection Types
717 ## There are two kinds of possible garbage collection (gc) operations:
719 ## 1. A normal, full gc
720 ## 2. A "mini" gc
722 ## If the full garbage collection interval has expired (or gc has never been
723 ## run), then a normal, full gc will take place. Otherwise, a "mini" gc will
724 ## take place if the file .needsgc exists.
726 ## A "mini" gc is similar to "git gc --auto" in that it may not end up actually
727 ## doing anything unless the right conditions are present so it's not a burden
728 ## to run it often. If the file .needsgc exists, a "mini" gc will occur at
729 ## the next opportunity.
731 ## See the docs/technical/gc.txt and docs/technical/gc-mini.txt files for more
732 ## of the gory details of how garbage collection is performed.
734 ## Note, however, that the .nogc file suppresses ALL gc activity (normal or mini).
737 proj="${1%.git}"
738 shift
739 cd "$cfg_reporoot/$proj.git"
740 [ -d objects/pack ] || { rm -f gfi-packs; mkdir -p objects/pack; }
741 mirror_url="$(get_mirror_url)" || :
742 svn_mirror=
743 ! is_svn_mirror_url "$mirror_url" || svn_mirror=1
744 gfi_mirror=
745 if [ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$mirror_url"; then
746 gfi_mirror=1
749 # If git config --bool --get girocco.redelta is explicitly false then automatic
750 # redelta when there are less than $var_redelta_threshold objects will be suppressed.
751 # On the other hand, if git config --get girocco.redelta is "always" then, on a full
752 # gc only, for the final repack, deltas will always be recomputed.
753 # This can be set on a per-project basis to avoid unusual pathological gc behavior.
754 # Setting this will hurt efficiency of the affected repository.
755 # Note that fast-import packs ALWAYS get new deltas regardless of this setting.
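# For example, a project can be opted out of automatic redelta with
# `git config girocco.redelta false` or forced to always recompute deltas on a
# full gc with `git config girocco.redelta always` (illustrative invocations).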
756 noreusedeltaopt="--no-reuse-delta"
757 [ "$(git config --bool --get girocco.redelta 2>/dev/null || :)" != "false" ] || noreusedeltaopt=
758 alwaysredelta=
759 [ "$(git config --get girocco.redelta 2>/dev/null || :)" != "always" ] || alwaysredelta=1
761 # Extract any -f or -F or --no-reuse-object or --no-reuse-delta options
762 # to be compatible with the old and new gc.sh versions and avoid ugly argument
763 # duplication in process lists at the same time
764 # Any options found will override the "girocco.redelta" setting
765 recompress=
766 idx=$#
767 while [ $idx -gt 0 ]; do
768 idx=$(( $idx - 1 ))
769 opt="$1"
770 shift
771 case "$opt" in
772 -f|--no-reuse-delta)
773 alwaysredelta=1
774 continue
776 -F|--no-reuse-object)
777 alwaysredelta=1
778 recompress=1
779 continue
781 -?*)
784 printf >&2 '%s\n' "bad non-option argument: $opt"
785 echo >&2 "(Did you perhaps intend to use a --xxx=yyy form?)"
786 exit 1
787 esac
788 [ -z "$opt" ] || set -- "$@" "$opt"
789 done
790 if [ -n "$alwaysredelta" ]; then
791 noreusedeltaopt="--no-reuse-delta"
792 [ -z "$recompress" ] || noreusedeltaopt="--no-reuse-object"
795 trap 'e=$?; rm -f .gc_in_progress; if [ $e != 0 ]; then echo "gc failed dir: $PWD" >&2; fi' EXIT
796 trap 'exit 130' INT
797 trap 'exit 143' TERM
799 # date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
800 datefmt='+%a, %d %b %Y %T %z'
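# e.g. this format renders as "Mon, 02 Jan 2006 15:04:05 +0000" (sample value only)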
802 isminigc=
803 if [ "${force_gc:-0}" = "0" ] && check_interval lastgc $cfg_min_gc_interval; then
804 if [ -e .needsgc ]; then
805 isminigc=1
806 else
807 progress "= [$proj] garbage check skip (last at $(config_get lastgc))"
808 exit 0
811 if [ -e .nogc ]; then
812 progress "x [$proj] garbage check disabled"
813 exit 0
815 if ! [ -e .nofetch ] && [ -e .clone_in_progress ] && ! [ -e .clone_failed ]; then
816 progress "x [$proj] garbage check disabled (clone in progress)"
817 exit 0
819 if [ -z "$isminigc" ] && [ -e .delaygc ] && [ -e .needsgc ]; then
820 # Eligible for a full gc but .delaygc is set so it would be skipped
821 # However .needsgc is also set so transform it into a mini instead
822 isminigc=1
823 progress "~ [$proj] garbage check delayed but checking mini because .needsgc"
826 if [ -n "$isminigc" ]; then
827 # Perform a "mini" gc
828 # Note that .delaygc is ignored here as that's only intended for full gc
829 lock_gc
830 rm -f .allowgc .needsgc
831 rm -f objects/pack/pack-*_[rful].keep
832 remove_crud
833 coalesce_reflogs
834 prune_reflogs
835 compact_reflogs
836 maintain_auto_gc_hack
837 generate_auto_gc_update
838 miniactive=
839 if [ -f .needspack ]; then
840 miniactive=1
841 progress "+ [$proj] mini garbage check ($(date))"
842 make_needs_pack
844 if [ -z "$cfg_delay_gfi_redelta" ] && [ -n "$gfi_mirror" ]; then
845 # $Girocco::Config::delay_gfi_redelta is false, force redeltification now
846 if [ -z "$miniactive" ]; then
847 miniactive=1
848 progress "+ [$proj] mini garbage check ($(date))"
850 repack_gfi_packs
852 if lotsa_loose_objects; then
853 if [ -z "$miniactive" ]; then
854 miniactive=1
855 progress "+ [$proj] mini garbage check ($(date))"
857 pack_loose_objects
859 # If there aren't at least 10 non-keep, non-bitmap, non-bndl packs then
860 # don't actually process them yet
861 lpo="--exclude-no-idx --exclude-keep --exclude-bitmap --exclude-bndl --quiet"
862 packcnt="$(list_packs --count $lpo objects/pack)" || :
863 if [ "${packcnt:-0}" -ge 10 ]; then
864 if [ -z "$miniactive" ]; then
865 miniactive=1
866 progress "+ [$proj] mini garbage check ($(date))"
868 # if we have at least 10 packs go ahead and pack all refs now too
869 git pack-refs --all --prune
870 if [ -n "$gfi_mirror" ]; then
871 repack_gfi_packs
872 packcnt="$(list_packs --count $lpo objects/pack)" || :
874 # if repack_gfi_packs dropped the pack count to < 10 don't combine
875 if [ "${packcnt:-0}" -ge 10 ]; then
876 combine_small_packs
877 combine_small_loose_packs
878 packcnt="$(list_packs --count $lpo objects/pack)" || :
880 # if we still have more than 10 packs trigger a full gc
881 if [ "${packcnt:-0}" -ge 10 ]; then
882 # We shouldn't be in a .delaygc state at this point, but if
883 # we are then nuke it because we really need a full gc now
884 rm -f .delaygc
885 git config --unset gitweb.lastgc
886 rm -f "$lockf"
887 git update-server-info # just in case
888 progress "- [$proj] mini garbage check triggering full gc too many packs ($(date))"
889 exit 0
892 rm -f "$lockf"
893 if [ -n "$miniactive" ]; then
894 git update-server-info
895 progress "- [$proj] mini garbage check ($(date))"
896 else
897 progress "= [$proj] mini garbage check nothing but crud removal to do ($(date))"
899 exit 0
902 # Avoid unnecessary garbage collections:
903 # 1. If lastreceive is set and is older than lastgc
904 # -AND-
905 # 2. We are not a fork (is_empty_alternates_file) -OR- lastparentgc is older than lastgc
907 # If lastgc is NOT set or lastreceive is NOT set we MUST run gc
908 # If we are a fork and lastparentgc is NOT set we MUST run gc
910 # If the repo is dirty after removing any crud we MUST run gc
912 gcstart="$(date "$datefmt")"
913 skipgc=
914 isfork=
915 is_empty_alternates_file objects/info/alternates || isfork=1
916 lastparentgcsecs=
917 [ -z "$isfork" ] || lastparentgcsecs="$(config_get_date_seconds lastparentgc)" || :
918 lastreceivesecs=
919 if lastreceivesecs="$(config_get_date_seconds lastreceive)" &&
920 [ "${force_gc:-0}" = "0" ] &&
921 lastgcsecs="$(config_get_date_seconds lastgc)" &&
922 [ $lastreceivesecs -lt $lastgcsecs ]; then
923 # We've run gc since we last received, so maybe we can skip,
924 # check if not fork or fork and lastparentgc < lastgc
925 if [ -n "$isfork" ]; then
926 if [ -n "$lastparentgcsecs" ] &&
927 [ $lastparentgcsecs -lt $lastgcsecs ]; then
928 # We've run gc since our parent ran gc so we can skip
929 skipgc=1
931 else
932 # We don't have any alternates (we're not a fork) so we can skip
933 skipgc=1
937 # Prevent any other simultaneous gc operations
938 lock_gc
940 # At this point, if .allowgc or .gc_failed exists, it's now crud to be removed
941 rm -f .allowgc .gc_failed
943 # Ideally we would do this in post-receive, but that would mean duplicating the
944 # logic so it's available in the chroot jail and that's highly undesirable
945 # Instead, since the first gc will be triggered immediately following the first
946 # push, we do the check here as it's quick and harmless if HEAD is already valid
947 check_and_set_head || :
949 # Always get rid of crud
950 remove_crud
952 # Always perform reflogs maintenance
953 coalesce_reflogs
954 prune_reflogs
955 compact_reflogs
957 # Always maintain auto gc hack
958 maintain_auto_gc_hack
959 generate_auto_gc_update
961 # Run 'git svn gc' now for svn mirrors
962 if [ -n "$svn_mirror" ]; then
963 git svn gc || :
966 # Skip the actual gc if .delaygc is set
967 if [ -e .delaygc ]; then
968 progress "x [$proj] garbage check delayed (except for crud removal)"
969 rm -f "$lockf"
970 exit 0
973 # Do not skip gc if the repo is dirty
974 if [ -n "$skipgc" ] && ! is_dirty; then
975 progress "= [$proj] garbage check nothing but crud removal to do ($(date))"
976 config_set lastgc "$gcstart"
977 rm -f "$lockf"
978 exit 0
981 bumptime=
982 if [ -n "$isfork" ] && [ -z "$lastparentgcsecs" ]; then
983 # set lastparentgc and then update gcstart to be at least 1 second later
984 config_set lastparentgc "$gcstart"
985 bumptime=1
987 if [ -z "$lastreceivesecs" ]; then
988 # set lastreceive and then update gcstart to be at least 1 second later
989 config_set lastreceive "$gcstart"
990 bumptime=1
992 if [ -n "$bumptime" ]; then
993 sleep 1
994 gcstart="$(date "$datefmt")"
997 progress "+ [$proj] garbage check ($(date))"
999 newdeltas=
1000 [ -z "$alwaysredelta" ] || newdeltas="$noreusedeltaopt"
1001 if [ -z "$newdeltas" ] && [ -n "$gfi_mirror" ]; then
1002 if [ $(list_packs --exclude-no-idx --count objects/pack) -le \
1003 $(list_packs --exclude-no-idx --count --quiet --only gfi-packs) ]; then
1004 # Don't bother with repack_gfi_packs since everything's being repacked
1005 newdeltas="--no-reuse-delta"
1008 if [ -z "$newdeltas" ] && [ -n "$noreusedeltaopt" ] &&
1009 [ $(list_packs --all --exclude-no-idx --count-objects objects/pack) -le $var_redelta_threshold ]; then
1010 # There aren't enough objects to worry about so just redelta to get the best pack
1011 newdeltas="--no-reuse-delta"
1013 if [ -z "$newdeltas" ]; then
1014 # Since we're not going to recompute deltas overall, we need to do the
1015 # "mini" maintenance so that we can get more optimal deltas
1016 [ -z "$noreusedeltaopt" ] || make_needs_pack
1017 repack_gfi_packs
1018 force_single_pack_redelta=
1019 [ -n "$gfi_mirror" ] || [ -n "$svn_mirror" ] || force_single_pack_redelta=1
1020 [ -z "$noreusedeltaopt" ] || combine_small_packs $force_single_pack_redelta
1021 [ -z "$noreusedeltaopt" ] || combine_small_loose_packs $force_single_pack_redelta
1025 ## Safe Pruning In Forks
1027 ## We are about to perform garbage collection. We do NOT use the "git gc" or
1028 ## the "git repack" commands directly as they do not provide enough control over
1029 ## the fine details. However, we DO maintain a "gc.pid" file during our garbage
1030 ## collection so that a simultaneous "git gc" by an administrator will be
1031 ## blocked (and similarly we refuse to start garbage collection if we cannot
1032 ## create the "gc.pid" file).
1034 ## When we say "gc" in the below description we are referring to our "gc.sh"
1035 ## script, NOT the "git gc" command.
1037 ## If the project we are running garbage collection (gc) on has any forks we
1038 ## must be careful not to remove any objects that while no longer referenced by
1039 ## this project (the parent) are still referenced by one or more forks (the
1040 ## children) otherwise the children will become corrupt and we can't abide
1041 ## corrupt children.
1043 ## One way to accomplish this is to simply hard-link all currently existing
1044 ## loose objects and packs in the parent into all the children that refer to the
1045 ## parent (via a line in their objects/info/alternates file) before beginning
1046 ## the gc operation and then relying on a subsequent gc in the child to clean up
1047 ## any excess objects/packs. We used to use this strategy but it's very
1048 ## inefficient because:
1050 ## 1. The disk space used by the old pack(s)/object(s) will not be reclaimed
1051 ## until all children (and their children, if any) run gc by which time
1052 ## it's quite possible the topmost parent will have run gc again and
1053 ## hard-linked yet another old pack down to its children (not to mention
1054 ## loose objects).
1056 ## 2. When using the "-A" option with "git repack", any new objects in the
1057 ## parent that are not referenced by children will continually get
1058 ## exploded out of the hard-linked pack in the children whenever the
1059 ## children run gc.
1061 ## 3. To avoid suboptimal and/or unnecessarily many packs being hard-linked
1062 ## into child forks, we must run the "mini" gc maintenance before we
1063 ## perform the hard-linking into the children which provides yet another
1064 ## source of inefficiency.
1066 ## While we were still using the "-A" option to "git repack" (that was not
1067 ## always the case) to guarantee we can access old ref values for long enough
1068 ## to send out a meaningful mail.sh notification, another, more efficient,
1069 ## option became available to prevent corruption of child forks that continue
1070 ## to refer to objects that are no longer reachable from any ref in the parent.
1072 ## The only things that need be copied (or hard-linked) into the child fork(s)
1073 ## are those objects that have become unreachable from any ref in the parent.
1075 ## When we were using the "git repack -A -d" + "git prune --expire=1.day.ago"
1076 ## technique, the only objects that could ever be removed were loose objects
1077 ## that "git prune" determined were expired. In that case, loose objects were
1078 ## all that need be hard-linked down to child forks in order to avoid
1079 ## corruption of any child fork(s).
1081 ## The "git repack -A -d" + "git prune --expire=1.day.ago" + hard-linking loose
1082 ## objects to child forks technique remains fundamentally sound from the
1083 ## perspective of supporting simultaneous gc and push and keeping newly
1084 ## unreachable objects around long enough to be sure we can send out meaningful
1085 ## ref change notifications and never corrupting any child forks and never
1086 ## persisting the lifetime of large old packs containing mostly duplicate or
1087 ## unreachable objects as gc percolates through a project's entire fork tree.
1089 ## However, that technique suffers from one potential prodigious pitfall.
1091 ## Unreachable objects come flying out of their packs to splatter all over the
1092 ## objects subdirectories possibly creating a huge, inefficient mess.
1094 ## Often this is not an issue. Even with a lot of rebasing going on, usually
1095 ## the only objects that will splatter are some commits, trees and the odd blob
1096 ## here and there. Not enough to be overly concerned about.
1098 ## However, for a repository that frequently experiences a lot of non-fast-
1099 ## forward updates and/or outright ref deletion, the number of objects suddenly
1100 ## popping out of their packs at "git repack -A -d" time can be overwhelming.
1102 ## To avoid this issue we now use a four phase pack creation strategy.
1103 ## This will result in creation of up to four packs (instead of at most one).
1105 ## I. A complete pack (with bitmaps if appropriate) gets created including
1106 ## only "reachable" objects from all refs/... refs plus HEAD. This will
1107 ## also serve as the virtual bundle for the repository.
1109 ## II. A pack of recently-became-unreachable objects and friends is created.
1110 ## (The "friends" are ref logs, linked working tree HEADs and indices.)
1111 ## Because both the pre-receive and update.sh script record all ref
1112 ## changes we can easily choose the cut off point for "recently".
1113 ## It is only the fact we maintain those logs in the reflogs subdirectory
1114 ## that allows this step to be possible.
1116 ## III. If the repository has any forks with a non-zero length alternates file,
1117 ## yet another pack of "--keep-unreachable" objects is generated that will
1118 ## not actually be kept in the parent, but hard-linked into all the forks.
1120 ## IV. Finally, after running "git prune-packed", any remaining loose objects
1121 ## are migrated into a pack of their own.
1123 ## We then remove any non-.keep packs that existed before we started the
1124 ## process, being careful to keep any same-pack pushes for the "Push Pack Redux"
1125 ## race condition (see docs/technical/gc.txt).
1127 ## By using "git pack-objects" directly we are able to accomplish this with
1128 ## very little additional effort.
1130 ## The packs produced by (III) are treated almost like ".keep" packs by child
1131 ## forks in that the objects in them are never repacked into any other
1132 ## "--keep-unreachable" packs (but they can migrate into phase I or II packs)
1133 ## and those phase III packs are then hard-linked into any grandchild forks.
1135 ## This avoids the space explosion that could occur if each fork level ended
1136 ## up duplicating the "--keep-unreachable" pack space by repacking those
1137 ## objects (essentially breaking the hard-link to the single copy of those
1138 ## objects).
1140 ## While it is true that each level of forks could potentially add yet another
1141 ## phase III pack to be hard-linked down to its children, such packs will only
1142 ## include unreachable objects not already in any phase III packs that were
1143 ## received from the parent.
1145 ## The space for the phase III packs will not be reclaimed until the gc
1146 ## finishes percolating through the entire "fork tree" of a project.
1148 ## This is not much different than the "git repack -A -d" situation where
1149 ## all the loose objects are hard-linked down into child forks. In that
1150 ## case forks that actually need any of those objects could gradually reduce
1151 ## the number of objects hard-linked into deeper fork levels.
1153 ## The difference with a phase III "--keep-unreachable" pack is that there
1154 ## cannot be any gradual reduction like that since it would require repacking
1155 ## the pack and breaking the hard-link thereby increasing storage space. The
1156 ## storage will instead always be reclaimed all at once when all of the
1157 ## projects in the "fork tree" complete their gc.
1159 ## However, the belief is that the huge space win by having all the
1160 ## unreachable objects packed up together far eclipses (when many objects are
1161 ## involved, the single-pack version can end up using 1/20th or less of the
1162 ## disk space compared to having them all as loose objects) any brief minor
1163 ## space savings that might occur under the "git repack -A -d" loose object
1164 ## system prior to the gc collection completing for all the projects in the
1165 ## "fork tree".
1169 ## utility functions
1172 make_packs_ugw() {
1173 find -L "$1" -maxdepth 1 -type f ! -perm -ug+w \
1174 -name "pack-$octet20*.pack" -exec chmod ug+w '{}' + || :
1175 } 2>/dev/null
1177 get_index_tree() {
1178 if [ -s "$1" ]; then
1179 GIT_INDEX_FILE="$1"
1180 export GIT_INDEX_FILE
1181 git write-tree 2>/dev/null || :
1182 unset GIT_INDEX_FILE
1186 get_detached_head() {
1187 if [ -s "$1" ] && read -r _head <"$1" 2>/dev/null; then
1188 case "$_head" in $octet20*)
1189 echo "$_head"
1190 esac
1194 # single argument must be a top-level GIT_DIR
1195 # output will be (one per line, zero or more lines) full hash
1196 # values (i.e. 40 or more hex digits) from possible detached head
1197 # "single level" refs (e.g. "FETCH_HEAD" "MERGE_HEAD" etc.) from
1198 # files in the specified directory that have a "suitable" name and
1199 # have a length >= 40 and <= 1000 where EVERY line in a file MUST
1200 # match a 40 digit (or longer) hex string or that file will be ignored.
1201 # In a nod to Git, each line first has any tab (and anything following it) truncated.
1202 # NO sorting NOR "uniq"ing NOR "--batch-check"ing is performed on the output.
1203 # Also note that "HEAD" is NOT EXCLUDED and if detached will be output!
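# For example (illustrative only): a MERGE_HEAD whose every line is a 40+ digit
# hex hash is emitted one hash per line (lowercased, anything after a tab dropped),
# whereas a file containing any non-hash line is ignored entirely.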
1204 get_detached_friends() {
1205 if [ -n "$1" ] && [ -d "$1" ]; then
1206 LC_ALL=C find -H "$1" -maxdepth 1 -name '[A-Za-z]*[A-Za-z0-9]' \
1207 -type f -size +39c -size -1001c -exec awk '
1208 BEGIN {exit}
1209 function process(fn, fnt, hashes, hcnt, fl, hi) {
1210 fnt = fn; sub(/^.*\//, "", fnt)
1211 if (length(fnt) > 31 || fnt !~ /^[A-Za-z][A-Za-z0-9_-]*[A-Za-z0-9]$/) return;
1212 hcnt = 0;
1213 while (getline fl < fn) {
1214 sub(/\t.*$/, "", fl)
1215 if (length(fl) < 40 || fl !~ /^[0-9a-fA-F][0-9a-fA-F]*$/) {hcnt = 0; break;}
1216 hashes[++hcnt] = fl;
1218 close(fn)
1219 for (hi=1;hi<=hcnt;++hi) print tolower(hashes[hi]);
1221 END {for (idx=1;idx<ARGC;++idx) process(ARGV[idx])}
1222 ' '{}' '+' 2>/dev/null || :
1226 # get_worktrees_friends
1227 # single argument is a (possibly relative) path to a "worktrees" subdir
1228 # if omitted it defaults to "worktrees"
1229 # any "friends" found in there are output to stdout
1230 get_worktrees_friends() {
1231 if [ -d "${1:-worktrees}" ]; then
1232 find -L "${1:-worktrees}" -mindepth 2 -maxdepth 2 -name HEAD -type f -print |
1233 while read -r _lwth; do
1234 get_detached_head "$_lwth"
1235 get_detached_friends "${_lwth%HEAD}"
1236 get_index_tree "${_lwth%HEAD}index"
1237 done
1241 # compute_extra_reachables
1242 # create lines suitable for a packed-refs file mentioning all the
1243 # other refs we might like to keep.
1244 # the current directory MUST be set to the repository's --git-dir
1245 # the following are included:
1246 # * refs mentioned in repack/packed-refs.extra (if it exists)
1247 # * refs mentioned in reflogs/... files
1248 # * tree(s) created from index file(s)
1249 # * detached linked working tree heads
1250 # Resulting objects are tested for existence and uniqified then output
1251 # one per line under a refs/z* namespace
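# The emitted lines look like (hypothetical hashes) "<hash> refs/z/1" through
# "<hash> refs/z/9", then "<hash> refs/zz/10" and so on -- the number of z's
# matches the number of digits in the sequence number.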
1252 compute_extra_reachables() {
1254 if [ -s repack/packed-refs.extra ]; then
1255 LC_ALL=C sed <repack/packed-refs.extra -n \
1256 -e 's/^\([0-9A-Fa-f][0-9A-Fa-f]*\).*$/\1/p' \
1257 -e 's/^\^\([0-9A-Fa-f][0-9A-Fa-f]*\).*$/\1/p'
1259 digits8='[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
1260 find -L reflogs -mindepth 1 -maxdepth 1 -type f -name "$digits8*" -exec gzip -c -d -f '{}' + |
1261 LC_ALL=C awk '{print $2; print $3}'
1262 get_detached_friends .
1263 ! [ -f index ] || get_index_tree index
1264 get_worktrees_friends
1266 is_git_dir private &&
1267 [ "$(cd objects && pwd -P)" = "$(cd private/objects && pwd -P)" ] &&
1268 [ "$(cd refs && pwd -P)" != "$(cd private/refs && pwd -P)" ]
1269 then
1270 git --git-dir=private show-ref --head --hash 2>/dev/null || :
1271 get_detached_friends private
1272 ! [ -f private/index ] || get_index_tree private/index
1273 get_worktrees_friends private/worktrees
1275 } | LC_ALL=C sort -u |
1276 git cat-file ${var_have_git_260:+--buffer} --batch-check"${var_have_git_185:+=%(objectname)}" |
1277 LC_ALL=C awk '!/missing/ {num++; print $1 " " "refs/" substr("zzzzzzzzzzzz", 1, length(num)) "/" num}'
1281 ## main gc logic
1284 # Everything else is more efficient if we do this first
1285 # The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
1286 git pack-refs --all --prune
1287 [ -e packed-refs ] || >>packed-refs # should never happen...
1289 # If we have a logs directory or a worktrees directory expire the ref logs now
1290 # Note that Git itself does not use either --rewrite or --updateref, so neither do we
1291 ! [ -d logs ] && ! [ -d worktrees ] || eval git reflog expire --all "${quiet:+>/dev/null 2>&1}" || :
1293 make_repack_dir
1294 ! [ -e .gc_failed ] || exit 1
1295 rm -f .gc_in_progress # make sure
1296 touch .gc_in_progress # it's truly fresh
1297 rm -f bundles/* objects/pack/pack-*.bndl
1298 # These only exist for a brief time before the packs lose their _f suffix
1299 # "Push Pack Redux" does not apply to these since they were only ever present with _f
1300 rm -f objects/pack/pack-*_f.keep
1301 # This is perhaps a bit aggressive in that if we're suffering from "Push Pack Redux"
1302 # and somehow we get run again immediately after the run where "Push Pack Redux" happened
1303 # and we have garbage collection forced, there's just the barest, almost negligible,
1304 # possibility that the "Push Pack Redux" ref updates _still_ have not happened and we
1305 # should not be removing _r .keep files. None of the normal Girocco processing can
1306 # cause this. The second run of this script would have to use the force gc option
1307 # for it to even be possible in the first place. What's much more likely is that
1308 # the initial run of this script was somehow interrupted in the middle before it
1309 # could get rid of the _r .keep file itself in which case it's better to get rid of
1310 # it now to avoid keeping something around that would perturb our nice and neat gc
1311 rm -f objects/pack/pack-*_r.keep
1312 # We will add .keep files for _u and _l packs if and when we run phase III
1313 # Otherwise they need to not have any .keep files during phases I and II
1314 rm -f objects/pack/pack-*_[ul].keep
1316 # We need to make sure that any non-Girocco (barely tolerated) Git object creation
1317 # activity will be able to "freshen" the pack containing a pre-existing object
1318 # that's being written. This really should not be necessary as the pre-receive
1319 # hook should make sure this takes place for any incoming pushes.
1320 # However, do it here anyway just in case.
1321 make_packs_ugw objects/pack
1323 # This is only effective with Git v2.3.5 and later and it will only matter when
1324 # we are using one of the "internal_rev_list" modes of pack-objects
1325 # (the combine-packs.sh script never uses any of those modes)
1326 # The "git repack" and "git prune" commands always set this internally themselves
1327 # It makes no difference if there's no repository corruption
1328 GIT_REF_PARANOIA=1 && export GIT_REF_PARANOIA

# All of the options we might want to use with pack-objects were supported at
# some point prior to Git version v1.6.6, which is the minimum version that
# Girocco now requires -- except for one (--use-bitmap-index). Several of them
# are "boilerplate" options we always want to use so we bundle them up here.
pkopt="--delta-base-offset --keep-true-parents --non-empty --all-progress-implied"
# We want to use --include-tag, but before Git v2.10.1 it would leave out
# "middle" tags (e.g. a tag of a tag of a commit would omit the tagged tag)
# See http://repo.or.cz/git.git/b773ddea2cd3b08c for details
# ("pack-objects: walk tag chains for --include-tag", 2016-09-07, v2.10.1)
# This is not a free check as it matches all refs against refs/tags/ then
# peels all the annotated tags and checks for inclusion. The situation in
# which it would add a tag that was not already included by a reachability
# trace that included tag starting points can only occur if a new tag gets
# pushed during gc pointing to something that would have been packed anyway.
# But, it could happen and, really, compared to gc as a whole it's not that
# expensive to perform (provided we do not get an unconnected pack).
[ -z "$var_have_git_2101" ] || pkopt="$pkopt --include-tag"
pkopt="$pkopt ${quiet:---progress} $packopts"
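# (illustrative: on a quiet run with Git v2.10.1 or later and the default
#  $packopts this works out to roughly
#    pkopt="--delta-base-offset --keep-true-parents --non-empty
#           --all-progress-implied --include-tag -q
#           --depth=50 --window=50 --window-memory=1g")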

# The git pack-objects command only supports bitmaps if all objects are being
# packed (the "--all" option) and the "--stdout" option is NOT being used.
# Additionally, while packing, if any encountered reachable objects are
# determined to be "not wanted" then no bitmap index will be written anyway.
# While it is theoretically possible that a project with a non-empty alternates
# file ends up packing all objects (because it does not actually use any of the
# objects found in the alternates), it's very unlikely. And, in the unlikely
# event that did occur, clients would see a message about only using one bitmap
# because Git can only use one bitmap at a time and at least one of the
# alternates is bound to have a bitmap. Therefore if we see a non-empty
# alternates file, we disable writing bitmaps which avoids the warning and any
# possibility of a client warning as well. Also if we are running anything
# before Git v2.1.0 (the effective version for repack.writeBitmaps=true) then
# we also always disable bitmap writing.
wbmopt=
[ -z "$var_have_git_210" ] || wbmopt="--write-bitmap-index"
# More recent versions of pack-objects have optimizations when not using the
# --local option. If we do not have any alternates it's a pointless option.
# If we do have alternates we need to skip writing a bitmap and we cannot
# have a bundle since it must contain all objects.
if [ -n "$isfork" ]; then
	lclopt="--local"
	wbmopt=
	makebndl=
else
	lclopt=
	makebndl=1
fi

##
## Phase I
##

wbmstr=
[ -n "$wbmopt" ] || wbmstr=" (bitmaps disabled)"
progress "~ [$proj] running primary full gc pack-objects$wbmstr ($(date))"

gotforks=
! has_forks_with_alternates "$proj" || gotforks=1

# To avoid "Push Pack Redux" (see docs/technical/gc.txt), after collecting the
# initial preexisting non-keep pack list, we rename them so that an incoming push
# pack cannot possibly experience a pack name collision. Git does not require
# use of the "default" pack names, simply that the proper extensions are used.
# We rename to insert an "_r" just before the extension to avoid "Push Pack Redux"
# name collisions. Later on we may create an "unreachable" pack for hard-linking
# down into forks and it will have an "_u" inserted just before its extension.
packlist="$(list_packs -C objects/pack --all --exclude-no-idx --exclude-keep --quiet .)" || :
oldpacks=
for oldpack in $packlist; do
	oldpack="${oldpack%.pack}"
	[ -f "objects/pack/$oldpack.pack" ] || {
		echo >&2 "[$proj] unable to list old pack files"
		exit 1
	}
	case "$oldpack" in pre-auto-gc-[12])
		# we never disturb pre-auto-gc-1 or pre-auto-gc-2 packs
		continue
	esac
	oldpackhex="${oldpack#pack-}"
	if [ "${oldpackhex#*[!0-9a-fA-F]}" != "$oldpackhex" ]; then
		# names not exclusively hexadecimal do not need renaming
		case "$oldpack" in
		pack-$octet20*_l)
			# _l packs are treated like still-unpacked loose objects
			continue;;
		*_f)
			# _f packs can only be left over from a previously interrupted gc;
			# they need to be renamed to _r now so they're not confused with
			# any freshly generated "final" packs (and we already removed
			# any pre-existing *_f.keep files so we're good to go)
			;;
		*)
			oldpacks="${oldpacks:+$oldpacks }$oldpack"
			continue;;
		esac
	fi
	rename_pack "objects/pack/$oldpack" "objects/pack/${oldpack%_f}_r" || {
		echo >&2 "[$proj] unable to rename old pack files"
		exit 1
	}
	# If the oldpack has a .keep now it means a "Push Pack Redux" is actually
	# in progress at this moment and we need to .keep the renamed pack,
	# otherwise no "Push Pack Redux" has started yet or it has already finished.
	# In either case we're okay because if it's just finished then all ref
	# changes have already been made so we don't need a .keep and we will
	# see the ref changes and grab all the objects via a reachability trace.
	# If it hasn't started yet that's okay because we're done moving that
	# name so a complete pack will appear under the old name that we'll
	# leave alone.
	if [ -f "objects/pack/$oldpack.keep" ]; then
		echo "Push Pack Redux" >"objects/pack/${oldpack%_f}_r.keep"
	else
		oldpacks="${oldpacks:+$oldpacks }${oldpack%_f}_r"
	fi
done
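# At this point an ordinary preexisting pack pair such as
#   objects/pack/pack-<40-hex>.pack (+ .idx)
# has become
#   objects/pack/pack-<40-hex>_r.pack (+ .idx)
# (rename_pack, a helper defined elsewhere in this script, moves the related
# files together) and only the _r names appear in $oldpacks, so a push pack
# arriving under the original name can never collide with anything on disk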

# We wish to keep deltas from our last full pack so if we're not redeltaing
# then make sure the .pack associated with the .bitmap has a newer mod time
# (If there is no .bitmap then touch the pack with the most objects instead.)
if [ -z "$newdeltas" ]; then
	bmpack="$(list_packs --exclude-no-bitmap --exclude-no-idx --max-matches 1 objects/pack)"
	[ -n "$bmpack" ] || bmpack="$(list_packs --exclude-no-idx --max-matches 1 --object-limit -1 --include-boundary objects/pack)"
	if [ -n "$bmpack" ] && [ -f "$bmpack" ] && [ -s "$bmpack" ]; then
		sleep 1
		touch -c "$bmpack" 2>/dev/null || :
		# We must touch .gc_in_progress here to avoid $bmpack looking
		# like it's been "freshened" when redundant packs are removed
		# It's okay if they have the same mod time, but POSIX does not
		# guarantee an ordering for the "touching" that occurs which is
		# why this must be a separate command but needs no "sleep 1"
		touch .gc_in_progress
	fi
fi

# Now we need to make sure that any "freshening" that takes place will actually
# result in a "newer" modification time than the .gc_in_progress file now has
sleep 1

# We run git pack-objects from the repack subdirectory so we can force
# optimized packs to be generated even for repositories that do not have any
# tagged commits
packs="$(git --git-dir=repack pack-objects </dev/null \
	$pkopt --all $newdeltas $lclopt ${wbmopt:---honor-pack-keep} "$@" repack/alt/pack/pack)"
vcnt packcnt $packs
[ $packcnt -eq 1 ] || makebndl=
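# (pack-objects prints one pack hash per line on stdout, so $packs is a
#  whitespace-separated list of names; vcnt, defined near the top of this
#  script, just counts them, and a clone bundle is only attempted when the
#  primary pack-objects run produced exactly one pack)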

##
## Phase II
##

progress "~ [$proj] running supplementary gc pack-objects ($(date))"

# Add the "supplementary" refs
compute_extra_reachables >>repack/packed-refs

# Subtract the primary refs
GIT_ALTERNATE_OBJECT_DIRECTORIES="$PWD/repack/alt"
export GIT_ALTERNATE_OBJECT_DIRECTORIES
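# (design note: with repack/alt on the alternates path, every object written
#  by the Phase I pack now looks "borrowed", so the --local pack-objects run
#  below emits only objects reachable from the supplementary refs that the
#  primary pack did not already include -- that is the "subtraction" above)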

# For this one we MUST use --local and MUST NOT use --write-bitmap-index
# However, if there is a "logs" subdirectory we need to use --reflog
# We do add it, just in case, if the linked working trees dir is present
# We do not add --indexed-objects as that requires v2.2.0 and it's unclear
# if it properly includes linked working tree index files or not. The
# above compute_extra_reachables has already included all index trees (thereby
# providing proper --indexed-objects support for all Git versions) making the
# option completely unnecessary.
rflopt=
! [ -d logs ] && ! [ -d worktrees ] || rflopt=--reflog
spacks="$(git --git-dir=repack pack-objects </dev/null \
	$pkopt --honor-pack-keep --all $rflopt $newdeltas --local "$@" repack/alt/pack/pack)"

##
## Phase III
##

# There's nothing to do for Phase III unless we have forks that refer to our
# project from their alternates file
hlpacks=
upacks=
if [ -n "$gotforks" ]; then

	progress "~ [$proj] running keep-unreachable gc pack-objects for forks ($(date))"

	# If we are a fork, any pre-existing _u packs need to have a .keep
	# for this phase and be added to the hlpacks list otherwise (we are
	# not a fork) pre-existing _u packs are anomalies to be treated like
	# regular non-_u packs
	if [ -n "$isfork" ]; then
		for upack in $(find -L objects/pack -mindepth 1 -maxdepth 1 -name "pack-$octet20*_[ul].pack" -print); do
			upack="${upack%.pack}"
			[ -e "$upack.keep" ] || echo "unreachable" >"$upack.keep"
			case "$upack" in *_l);;*)
				hlpacks="${hlpacks:+$hlpacks }${upack#objects/pack/pack-}"
			esac
		done
	fi
	# Using either --no-reuse-delta or --no-reuse-object together with the
	# --keep-unreachable option is a very, very, very bad idea when good
	# packs are the desired outcome. If newdeltas are being generated
	# then we pack to a temp name, and use combine-packs.sh to get a better
	# pack as the result to avoid making a bad --keep-unreachable pack
	pfx=
	[ -z "$newdeltas" ] || pfx="ku"
	upacks="$(git --git-dir=repack pack-objects </dev/null \
		$pkopt --honor-pack-keep --all $rflopt --keep-unreachable --local "$@" repack/alt/pack/${pfx}pack)"
	if [ -n "$upacks" ] && [ -n "$newdeltas" ]; then
		progress "~ [$proj] rebuilding keep-unreachable pack deltas"
		oldupacks="$upacks"
		upacks="$(
			printf "repack/alt/pack/${pfx}pack-%s.pack\n" $oldupacks |
			run_combine_packs --names --weak-naming --non-empty --all-progress-implied ${quiet:---progress} \
			$packopts $newdeltas "$@" repack/alt/pack/pack)"
		eval rm -f "$(printf '"repack/alt/pack/${pfx}pack-%s".* ' $oldupacks)"
	fi
	for upack in $upacks; do
		rename_pack "repack/alt/pack/pack-$upack" "repack/alt/pack/pack-${upack}_u"
	done
	rm -f objects/pack/pack-*_[ul].keep
	[ -z "$hlpacks" ] && [ -z "$upacks" ] ||
	progress "~ [$proj] hard-linking keep-unreachable pack(s) into immediate child forks"

	# We have to update the lastparentgc time in the child forks even if they do not get any
	# new "unreachable packs" because they need to run gc just in case the parent now has some
	# objects that used to only be in the child so they can be removed from the child.
	# For example, a "patch" might be developed first in a fork and then later accepted into
	# the parent in which case the objects making up the patch in the child fork are now
	# redundant (since they're now in the parent as well) and need to be removed from the
	# child fork which can only happen if the child fork runs gc.
	lastparentgc="$(date "$datefmt")"

	# It is enough to copy objects just one level down and get_repo_list
	# takes a regular expression (which is automatically prefixed with '^')
	# so we can easily match forks exactly one level down from this project
	forkdir="$proj"
	get_repo_list "$forkdir/[^/:][^/:]*:" |
	while read fork; do
		# Ignore forks that do not exist or are symbolic links
		! [ -L "$cfg_reporoot/$fork.git" ] && [ -d "$cfg_reporoot/$fork.git" ] ||
			continue
		# Or have an empty alternates file
		! is_empty_alternates_file "$cfg_reporoot/$fork.git/objects/info/alternates" ||
			continue
		runupdate=
		# Match hlpacks in parent project if any
		if [ -n "$hlpacks" ]; then
			mkdir -p "$cfg_reporoot/$fork.git/objects/pack"
			eval ln -f "$(printf '"objects/pack/pack-%s.pack" ' $hlpacks)" \
				"$(printf '"objects/pack/pack-%s.idx" ' $hlpacks)" \
				'"$cfg_reporoot/$fork.git/objects/pack/"'
			runupdate=1
		fi
		# Match upacks in repack/alt area if any
		if [ -n "$upacks" ]; then
			mkdir -p "$cfg_reporoot/$fork.git/objects/pack"
			eval ln -f "$(printf '"repack/alt/pack/pack-%s_u.pack" ' $upacks)" \
				"$(printf '"repack/alt/pack/pack-%s_u.idx" ' $upacks)" \
				'"$cfg_reporoot/$fork.git/objects/pack/"'
			runupdate=1
		fi
		if ! [ -e "$cfg_reporoot/$fork.git/.needsgc" ]; then
			# Trigger a mini gc in the fork if it now has too many packs
			packs="$(list_packs --quiet --count --exclude-no-idx --exclude-keep "$cfg_reporoot/$fork.git/objects/pack")" || :
			if [ -n "$packs" ] && [ "$packs" -ge 20 ]; then
				>"$cfg_reporoot/$fork.git/.needsgc"
			fi
		fi
		[ -z "$runupdate" ] || git --git-dir="$cfg_reporoot/$fork.git" update-server-info
		# Update the fork's lastparentgc date (must be more recent than $gcstart)
		git --git-dir="$cfg_reporoot/$fork.git" config gitweb.lastparentgc "$lastparentgc"
	done
fi

# Now move any primary/supplementary packs back into objects/pack
# then drop any "unfreshened" redundant packs and clear repack/alt

# First make sure the primary pack(s) have the most recent mod time
if [ -n "$packs" ]; then
	[ -z "$spacks" ] || sleep 1
	printf 'repack/alt/pack/pack-%s.pack\n' $packs | xargs touch -c 2>/dev/null || :
fi

# Move the packs into place but with a _f suffix and a .keep file for now
for pack in $packs $spacks; do
	rename_pack "repack/alt/pack/pack-$pack" "objects/pack/pack-${pack}_f"
	[ -e "objects/pack/pack-${pack}_f.keep" ] ||
		echo "final" >"objects/pack/pack-${pack}_f.keep"
done

# It's possible that one of the $oldpacks had a .bitmap, got renamed (along
# with its .bitmap) and then got "freshened" causing us to not remove it
# However, if $wbmopt is set we most likely now have TWO .bitmap packs!
# This can produce ugly warnings we don't want and possibly get the wrong
# bitmap used since only one .bitmap file can ever be used by Git.
# If this has happened, the .bitmap we want to discard will always have
# an _r suffix so we can just zap any such now (removing a .bitmap still
# leaves its pack intact).
[ -z "$wbmopt" ] || rm -f objects/pack/pack-*_r.bitmap || :

# Remove the redundant packs that have not since been "freshened"
# This does not completely eliminate the race condition window (Girocco's own
# activities -- gc/fetch/receive are immune to the race) but it substantially
# shrinks it down to just the time after the find but before the following rm
>repack/oldpacks
[ -z "$oldpacks" ] ||
printf 'objects/pack/%s.pack\n' $oldpacks |
LC_ALL=C sort >repack/oldpacks
find -L objects/pack -maxdepth 1 -type f -name "pack-$octet20*.pack" -newer .gc_in_progress -print |
LC_ALL=C sort >repack/freshened
deadpacks="$(LC_ALL=C join -v 1 repack/oldpacks repack/freshened | LC_ALL=C sed 's/\.pack$//')"
[ -z "$deadpacks" ] ||
eval echo "$(printf '"%s".* ' $deadpacks)" | xargs rm -f || :
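# (how this works: join -v 1 keeps only lines unique to repack/oldpacks, i.e.
#  renamed _r packs whose mtime was never bumped past .gc_in_progress by a
#  "freshen"; the printf emits each name quoted with an unquoted .* glob
#  appended, so the eval'd echo expands "objects/pack/pack-<sha>_r".* to all
#  of that pack's files, which xargs rm -f then deletes)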

# No need for this anymore
rm -rf repack/alt objects/pack/repack
unset GIT_ALTERNATE_OBJECT_DIRECTORIES

##
## Phase IV
##

progress "~ [$proj] running gc prune-packed"

# We do not want the redundant packs or any new "--keep-unreachable" pack(s) to be
# present while running prune-packed. We try to guarantee that any loose object
# (or any object present in a pack with an _l suffix which was created by mini gc)
# that's unreachable persists for at least one $Girocco::Config::min_gc_interval
# (notwithstanding administrator interference to force earlier gc to occur).
# If we were to include the redundant/keep-unreachable pack(s) when running
# prune-packed and a loose unreachable object happened to be duplicated in one
# of them we would end up removing it too soon and void our guarantee.
git prune-packed $quiet

progress "~ [$proj] running loose objects gc pack-objects ($(date))"

# Although Git v2.10.0 and later support a --pack-loose-unreachable option,
# we MUST NOT use it for these reasons:
# 1) We're not interested in expensive "unreachable" at this point, only "loose"
# 2) It produces simply horrid packs about 3.8x larger than they should be
# 3) We don't require anything more than Git v1.6.6
# The only way we could see any _o pack files at this point is if one got
# "freshened" while we were running gc. If that happens then it gets to live on
# until the next full gc and we need to include it in the loose repack here.
lpacks="$(list_packs --exclude-no-idx --exclude-no-sfx _l --exclude-no-sfx _o --quiet objects/pack |
	run_combine_packs --replace --names --loose --weak-naming --non-empty --honor-pack-keep \
	--all-progress-implied ${quiet:---progress} $packopts $newdeltas "$@")"
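# (data flow sketch: list_packs prints the names of the existing _l/_o packs,
#  run_combine_packs -- presumably a wrapper around the combine-packs.sh
#  script mentioned earlier -- folds those packs together with the remaining
#  loose objects (--loose), and thanks to --replace/--names the names of the
#  replacement pack(s) are what end up in $lpacks)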

if [ -n "$lpacks" ]; then
	# Make sure any primary pack(s) have a more recent mod time than "unreachable" objects packs
	if [ -n "$packs" ]; then
		sleep 1
		printf 'objects/pack/pack-%s_f.pack\n' $packs | xargs touch -c 2>/dev/null || :
	fi
	# We need to identify these packs later so we don't combine_packs them
	for objpack in $lpacks; do
		rename_pack "objects/pack/pack-$objpack" "objects/pack/pack-${objpack}_o" || :
	done
fi

# Polish up the final packs now
rm -f objects/pack/pack-*_f.keep
for pack in $packs $spacks; do
	rename_pack "objects/pack/pack-${pack}_f" "objects/pack/pack-$pack"
done

if [ -n "$lpacks" ]; then
	# Finally zap the corresponding loose objects
	progress "~ [$proj] running packed loose objects gc prune-packed"
	git prune-packed $quiet
fi

! [ -e .gc_failed ] || exit 1
# These, if they exist, are now meaningless and need to be removed
rm -f gfi-packs .needsgc .needspack .needspackgc

# Make sure this stays up to date
git update-server-info

# We must make loose objects group writable so that they
# can be freshened by other pushers. Technically we need only do this for
# push projects but to enable mirror projects to be more easily converted to
# push projects, we go ahead and do it for all projects.
# By the time we get here we really shouldn't have any of these, but just in case.
{ find -L objects/$octet -type f -name "$octet19*" -exec chmod ug+w '{}' + || :; } 2>/dev/null

# darcs mirrors have an xxx.log file that will grow endlessly
# if this is a mirror and the file exists, shorten it to 10000 lines
# also take this opportunity to optimize the darcs repo
if ! [ -e .nofetch ] && [ -n "$cfg_mirror" ]; then
	url="$(config_get baseurl)" || :
	case "$url" in darcs://* | darcs+http://* | darcs+https://*)
		if [ -n "$cfg_mirror_darcs" ]; then
			url="${url%/}"
			basedarcs="$(basename "${url#darcs*:/}")"
			if [ -f "$basedarcs.log" ]; then
				tail -n 10000 "$basedarcs.log" >"$basedarcs.log.$$"
				mv -f "$basedarcs.log.$$" "$basedarcs.log"
			fi
			if [ -d "$basedarcs.darcs" ]; then
				(
					cd "$basedarcs.darcs"
					# without show_progress suppress non-error output
					[ "${show_progress:-0}" != "0" ] || exec >/dev/null
					# Note that this does not optimize _darcs/inventories/ :(
					darcs optimize || :
				)
			fi
		fi
	esac
fi

# Create a matching .bndl header file for the all-in-one pack we just created
# but only if we're not a fork (otherwise the bundle would not be complete)
# and we are running at least Git version 1.7.2 (pack_is_complete always fails otherwise)
if [ -n "$makebndl" ] && [ -n "$var_have_git_172" ]; then
	# There should only be one pack in $packs but do some checking...
	# The one we just created will have a .idx and will NOT have a .keep
	progress "~ [$proj] creating downloadable bundle header"
	pkbase=
	pkhead=
	IFS= read -r curhead <repack/HEAD.orig || :
	if
		[ -s "objects/pack/pack-$packs.pack" ] &&
		[ -s "objects/pack/pack-$packs.idx" ] &&
		! [ -e "objects/pack/pack-$packs.keep" ] &&
		pkhead="$(pack_is_complete "$PWD/objects/pack/pack-$packs.pack" \
			"$PWD/repack/packed-refs.orig" "$curhead")"
	then
		pkbase="objects/pack/pack-$packs"
	fi
	if [ -n "$pkbase" ] && [ -n "$pkhead" ]; then
		{
			symref=
			case "$curhead" in "ref: refs/"?*|"ref:refs/"?*|"refs/"?*)
				symref="${curhead#ref:}"
				symref="${symref# }"
			esac
			bndlurl=
			[ -z "$cfg_httpbundleurl" ] || bndlurl=" url=$cfg_httpbundleurl/$proj.git/clone.bundle"
			echo "# v2 git bundle"
			LC_ALL=C sed -ne "/^$octet20$hexdig* refs\/[^ $tab]*\$/ p" <repack/packed-refs.orig
			if [ -n "$symref" ]; then
				printf "$pkhead HEAD\0symref=HEAD:%s%s\n" "$symref" "$bndlurl"
			else
				if [ -n "$bndlurl" ]; then
					printf "$pkhead HEAD\0%s\n" "${bndlurl# }"
				else
					echo "$pkhead HEAD"
				fi
			fi
			echo ""
		} >"$pkbase.bndl"
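		# ($pkbase.bndl now holds just the header of a v2 bundle, e.g.
		#    # v2 git bundle
		#    <sha1> refs/heads/master
		#    ...
		#    <sha1> HEAD (plus NUL-separated symref=/url= annotations)
		#    <empty line>
		#  so the rangecgi helper below can evidently serve .bndl + .pack
		#  back-to-back as one complete clone.bundle download without ever
		#  storing the pack data twice)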
		bndletag="$("$cfg_basedir/bin/rangecgi" --etag -m 1 "$pkbase.bndl" "$pkbase.pack")" || :
		bndlsha="$(printf '%s' "$bndletag" | git hash-object --stdin)" || :
		if [ -n "$bndletag" ]; then
			case "$bndlsha" in $octet20*)
				bndlshatrailer="${bndlsha#????????}"
				bndlshaprefix="${bndlsha%$bndlshatrailer}"
				bndlname="$(TZ=UTC date +%Y%m%d_%H%M%S)-${bndlshaprefix:-0}"
				[ -d bundles ] || mkdir bundles
				echo "${pkbase#objects/pack/}.bndl" >"bundles/$bndlname"
				echo "${pkbase#objects/pack/}.pack" >>"bundles/$bndlname"
				ln -s -f -n "$bndlname" bundles/latest
			esac
		fi
	fi
fi

# Record the size of this repo as the sum of its clone packed-refs + *.pack sizes as 1024-byte blocks
eval "reposizek=$(( $(
	echo 0 $(du -k repack/packed-refs.orig $(printf 'objects/pack/pack-%s.pack ' $packs) 2>/dev/null |
		LC_ALL=C awk '{print $1}') |
	LC_ALL=C sed -e 's/ / + /g') ))"
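# (worked example: if du -k prints "4 repack/packed-refs.orig" and
#  "34560 objects/pack/pack-<sha>.pack", the pipeline builds the string
#  "0 + 4 + 34560" and the arithmetic expansion leaves reposizek=34564,
#  i.e. the size in 1024-byte blocks)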
config_set_raw girocco.reposizek "${reposizek:-0}"

# Now we're finally done with this
rm -rf repack

# We never used to do anything about rerere or worktrees but we're
# trying to make nice with linked working trees these days :)
# Maybe even non-bare repositories too, but *shush* about those ;)
if [ -n "$var_have_git_250" ] && [ -d worktrees ]; then
	# The value "3.months.ago" is hard-coded into gc.c rather than
	# having the default be in worktree.c so we must provide it if
	# we get nothing out of the gc.worktreePruneExpire config item
	# Prior to Git v2.6.0 the config item was gc.pruneworktreesexpire
	# however we just always use the newer name no matter what Git version
	expiry="$(git config --get gc.worktreePruneExpire 2>/dev/null)" || :
	eval git worktree prune --expire '"${expiry:-3.months.ago}"' "${quiet:+>/dev/null 2>&1}" || :
fi

# git rerere does it right and handles its own default/config'd expiration values
! [ -d rr-cache ] || eval git rerere gc "${quiet:+>/dev/null 2>&1}" || :

# We use $gcstart here to avoid a race where a push occurs during the gc itself
# and the next future gc could be incorrectly skipped if we used the current
# timestamp here instead
config_set lastgc "$gcstart"
rm -f "$lockf"

progress "- [$proj] garbage check ($(date))"