# Invoked from taskd/taskd.pl

. @basedir@/jobd/gc-util-functions.sh

[ "$cfg_permission_control" != "Hooks" ] || umask 000

# darcs fast-export | git fast-import with error handling
{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
exec 4>&3 3>&1 1>&4 4>&-
"$cfg_basedir"/bin/darcs-fast-export \
	--export-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
git_ulimit fast-import \
	--export-marks="$(pwd)/gfi-marks" \
	--export-pack-edges="$(pwd)/gfi-packs" \
	--force 3>&- || _e2=$?
[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
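# (The import pipeline above uses a small fd-juggling pattern to capture each
# stage's exit status: fd 3 is expected to have been pointed at the original
# stdout beforehand (that setup is not shown in this excerpt), and inside the
# command substitution the swap `exec 4>&3 3>&1 1>&4 4>&-` points fd 3 at the
# substitution's capture pipe while restoring fd 1 to the real stdout. Each
# side of the pipeline then writes its status to fd 3, and those statuses
# become the here-doc text the two `read`s consume. A minimal sketch of the
# same pattern, illustration only, assuming two hypothetical commands
# `producer` and `consumer`:
#
#	exec 3>&1
#	{ read -r s1 || :; read -r s2 || :; } <<-EOT
#	$(
#		exec 4>&3 3>&1 1>&4 4>&-
#		{ e1=0; producer 3>&- || e1=$?; echo $e1 >&3; } |
#		{ e2=0; consumer 3>&- || e2=$?; echo $e2 >&3; }
#	)
#	EOT
#	exec 3>&-
#	[ "$s1" = 0 ] && [ "$s2" = 0 ]
#
# The same pattern is repeated for the bzr import below.)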
# bzr fast-export | git fast-import with error handling
{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
exec 4>&3 3>&1 1>&4 4>&-
bzr fast-export --plain \
	--export-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
git_ulimit fast-import \
	--export-marks="$(pwd)/gfi-marks" \
	--export-pack-edges="$(pwd)/gfi-packs" \
	--force 3>&- || _e2=$?
[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
clear_all_objects_and_packs() {
	if [ -d objects ]; then
		# make sure the repository is not left broken
		printf '%s\n' 'ref: refs/heads/master' >HEAD || :
		rm -f packed-refs || :
		find -H refs objects -type f -exec rm -f '{}' + >/dev/null 2>&1 || :
		! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
# We must now close the .clonelog file that is open on stdout and stderr

# It would be nice if git propagated the SIGXFSZ error on up to the shell;
# perhaps it will at some point in the future. In any case, the only file
# that might be too big would end up in the objects subdirectory.
# Search for any files of size $cfg_max_file_size512 blocks (if set) or
# larger and trigger the too-big failure that way as well.
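# (Worked example with a hypothetical value: if cfg_max_file_size512=2048,
# i.e. a 1 MiB limit expressed in 512-byte blocks, the find below uses
# "-size +2047", which matches any file occupying 2048 or more 512-byte
# blocks, in other words anything at or over the limit.)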
if [ "${cfg_max_file_size512:-0}" != "0" ]; then
	toobig="$(find -H objects -type f -size +$(( $cfg_max_file_size512 - 1 )) -print 2>/dev/null |
failaddrs="$(config_get owner)" || :
ccadm="${cfg_admincc:-0}"
if [ -n "$toobig" ] || [ "${exit_err:-0}" = "${var_xfsz_err:-999}" ]; then
	reposize="$(cd objects && du -sk . | LC_ALL=C awk '{print $1}')" || :
	if [ -n "$reposize" ]; then
		if [ $reposize -lt 5120 ]; then
			reposize="$reposize KiB"
		reposize="$(( $reposize / 1024 ))"
		if [ $reposize -lt 5120 ]; then
			reposize="$reposize MiB"
		reposize="$(( $reposize / 1024 ))"
		reposize="$reposize GiB"
The source repository${reposize:+ ($reposize)} exceeds our maximum allowed repository size."
clear_all_objects_and_packs
if [ "${exit_objs:-0}" != "0" ]; then
The source repository${exit_objs:+ ($exit_objs objects)} exceeds our maximum allowed object limit."
clear_all_objects_and_packs
if [ -n "$xfsz_err" ] || [ -n "$xobjs_err" ]; then
	# Mark as an exceeds limit clone failure and remember the exceeds
	# message(s) in both .clone_failed_exceeds_limit and .clonelog
	>.clone_failed_exceeds_limit
	if [ -n "$xfsz_err" ]; then
		printf '%s\n' "${xfsz_err#??}" >>.clone_failed_exceeds_limit
		printf '%s\n' "${xfsz_err#?}" >>.clonelog
	if [ -n "$xobjs_err" ]; then
		printf '%s\n' "${xobjs_err#??}" >>.clone_failed_exceeds_limit
		printf '%s\n' "${xobjs_err#?}" >>.clonelog
	# Remove the .clone_failed file to prevent "restarting" the clone since
	# restarting it will not cure the fact that it exceeds allowed limits
	# And the .clone_in_progress file has to go at the same time
	rm -f .clone_in_progress .clone_failed
! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
[ "$ccadm" = "0" ] || [ -z "$cfg_admin" ] ||
if [ -z "$failaddrs" ]; then failaddrs="$cfg_admin"; else failaddrs="$failaddrs,$cfg_admin"; fi
[ -z "$failaddrs" ] ||
Condolences. The clone of project $proj just failed.$xfsz_err$xobjs_err
* Project settings: $cfg_webadmurl/editproj.cgi?name=$(echo "$proj" | LC_ALL=C sed -e 's/[+]/%2B/g')
The project settings link may be used to adjust the settings
and restart the clone in order to try it again.
if [ -f .clonelog ] && [ -r .clonelog ]; then
loglines=$(LC_ALL=C wc -l <.clonelog)
if [ $loglines -le 203 ]; then
head -n 100 .clonelog
echo "[ ... elided $(( $loglines - 200 )) middle lines ... ]"
tail -n 100 .clonelog
} | mailref "clone@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj clone failed" "$failaddrs" || :
# removes any git-svn leftovers
cleanup_git_svn_leftovers() {

	# Remove any stale git-svn temp files
	# The git-svn process creates temp files with random 10-character names
	# in the root of $GIT_DIR. Unfortunately they do not have a recognizable
	# prefix, so we just have to kill any files with a 10-character name.
	# All characters are chosen from [A-Za-z0-9_] so we can at least check
	# for that, and fortunately the only collision is 'FETCH_HEAD', but that
	# doesn't matter.
	# There may also be temp files with a Git_ prefix.
	_randchar='[A-Za-z0-9_]'
	_randchar2="$_randchar$_randchar"
	_randchar4="$_randchar2$_randchar2"
	_randchar10="$_randchar4$_randchar4$_randchar2"
	find -L . -maxdepth 1 -type f -name "$_randchar10" -exec rm -f '{}' + || :
	find -L . -maxdepth 1 -type f -name "Git_*" -exec rm -f '{}' + || :
# removes all leftovers from a previous failed clone attempt
cleanup_failed_clone() {

	# Remove any left-over svn-remote.svn or remote.origin config
	git config --remove-section svn-remote.svn 2>/dev/null || :
	git config --remove-section remote.origin 2>/dev/null || :

	# If there is a remote-template.origin section, pre-seed the
	# remote.origin section with its contents
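	# (Illustration with a hypothetical entry: a --get-regexp output line such as
	#	remote-template.origin.pushurl git@example.com:mirror.git
	# is re-stored by the loop below as remote.origin.pushurl, because
	# "${name#remote-template}" strips the 'remote-template' prefix, leaving
	# '.origin.pushurl', which is then appended to "remote".)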
	git config --get-regexp '^remote-template\.origin\..' |
	while read name value; do
		if [ -n "$name" ] && [ -n "$value" ]; then
			git config "remote${name#remote-template}" "$value"
	# Any pre-existing FETCH_HEAD from a previous clone, failed or not, is
	# now garbage to be removed

	# Remove any stale ref locks
	clear_stale_ref_locks

	# Remove any left-over svn dir from a previous failed attempt

	# Remove any left-over .darcs dirs from a previous failed attempt

	# Remove any left-over repo.hg dir from a previous failed attempt

	# Remove any left-over import/export/temp files from a previous failed attempt
	rm -f bfe-marks dfe-marks hg2git-heads hg2git-mapping hg2git-marks* hg2git-state \
		gfi-marks gfi-packs .pkts-temp .refs-temp
	# Remove any git-svn junk
	cleanup_git_svn_leftovers

	# We want a gc right after the clone, so re-enable that just in case.
	# There's a potential race where we could add it and gc.sh could remove
	# it, but we'll re-unset lastgc just before we remove .delaygc at the end.
	[ -e .delaygc ] || >.delaygc
	git config --unset gitweb.lastgc 2>/dev/null || :

	# Remove all pre-existing refs
	git for-each-ref --format='delete %(refname)' | git_updateref_stdin 2>/dev/null || :
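	# (git_updateref_stdin is assumed to wrap `git update-ref --stdin`; with
	# stock git alone the same bulk deletion would be:
	#	git for-each-ref --format='delete %(refname)' | git update-ref --stdin
	# which removes every existing ref in one batched operation.)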
	# The initial state before a clone starts has HEAD as a symbolic-ref to master
	git symbolic-ref HEAD refs/heads/master

	# HEAD is no longer "ok"
	git config --unset girocco.headok 2>/dev/null || :

	# We, perhaps, ought to remove any packs/loose objects now, but the next gc
	# will get rid of any extras. Also, if we're recloning the same thing, any
	# preexisting packs/loose objects containing what we're recloning will only
	# speed up the reclone by avoiding some disk writes. So we don't kill them.

	# It's just remotely possible that a bunch of failures in a row could
	# create a big mess that just keeps growing and growing...
	# Trigger a .needsgc if that happens.
	check_and_set_needsgc

cd "$cfg_reporoot/$proj.git"

! [ -e .delaygc ] || >.allowgc || :

trap "exit_err=$?; echo '@OVER@'; touch .clone_failed; send_clone_failed" EXIT
echo "Project: $proj"
echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"
[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 1; }

url="$(config_get baseurl)" || :
case "$url" in *" "*|*"	"*|"")
	echo "Bad mirror URL (\"$url\")"
# Record original mirror type for use by update.sh
mirror_type="$(get_url_mirror_type "$url")"
git config girocco.mirrortype "$mirror_type"

echo "Mirroring from URL \"$url\""
if [ "$cfg_project_owners" = "source" ]; then
	config_set owner "$(ls -ldH "${url#file://}" 2>/dev/null | LC_ALL=C awk '{print $3}')"

mailaddrs="$(config_get owner)" || :
[ -z "$cfg_admin" ] ||
if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi
# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'
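# (git_add_config is assumed to append the setting to the GIT_CONFIG_PARAMETERS
# environment variable that git reads, so the values above apply to every git
# command this script spawns without touching the repository's config file.
# Roughly, a sketch of that idea:
#	GIT_CONFIG_PARAMETERS="${GIT_CONFIG_PARAMETERS:+$GIT_CONFIG_PARAMETERS }'fetch.unpackLimit=1'"
#	export GIT_CONFIG_PARAMETERS
# This is also why the svn fetch retry loop below saves and restores
# GIT_CONFIG_PARAMETERS around its temporary gc settings.)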
echo "Initiating mirroring..."

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then

svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
	[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 1; }
	# Allow the username to be specified in the "svn-credential.svn.username"
	# property and the password in the "svn-credential.svn.password" property
	# Use an 'anonsvn' username by default as is commonly used for anonymous svn
	# Default the password to the same as the username
	# The password property will be ignored unless a username has been specified
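	# For example, credentials could be pre-seeded per project with something
	# like the following (matching the 'anonsvn'/'anonsvn' defaults noted above):
	#	git config svn-credential.svn.username anonsvn
	#	git config svn-credential.svn.password anonsvn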
	if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
		if ! svnpass="$(git config --get svn-credential.svn.password)"; then
	case "$url1" in ?*"@"?*)
		urlsch="${url%%://*}"
		url="$urlsch://${url#*@}"
	# As a fallback, check in the URL, just in case
	case "$url1" in ?*"@"?*)
		urlsch="${url%%://*}"
		url="$urlsch://${url#*@}"
		svnuser="${url1%%:*}"
		if [ -n "$svnuser" ]; then
			case "$url1" in *":"*)
	if [ -z "$svnuser" ]; then
	GIT_ASKPASS_PASSWORD="$svnpass"
	export GIT_ASKPASS_PASSWORD
	# We just remove svn+ here, so svn+http://... becomes http://...
	# We also remove a trailing '/' to match what git-svn will do
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	# We require svn info to succeed on the URL, otherwise it's
	# simply not a valid URL, and without using -s on the init it
	# will not otherwise be tested until the fetch
	svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null
	# We initially use -s for the init which will possibly shorten
	# the URL. However, the shortening can fail if a password is
	# not required for the longer version but is for the shorter,
	# so try again without -s if the -s version fails.
	# We must use GIT_DIR=. here or ever so "helpful" git-svn will
	# create a .git subdirectory!
	GIT_DIR=. git svn init --username="$svnuser" --prefix "" -s "$svnurl" <"$mtlinesfile" ||
		GIT_DIR=. git svn init --username="$svnuser" --prefix "" "$svnurl" <"$mtlinesfile"
	# We need to remember this URL so we can detect changes because
	# ever so "helpful" git-svn may shorten it!
	config_set svnurl "$svnurl"
	# At this point, since we asked for a standard layout (-s) git-svn
	# may have been "helpful" and adjusted our $svnurl to a prefix and
	# then glued the removed suffix onto the front of any svn-remote.svn.*
	# config items. We could avoid this by not using the '-s' option
	# but then we might not get all the history. If, for example, we
	# are cloning an http://svn.example.com/repos/public repository that
	# early in its history moved trunk => public/trunk we would miss that
	# earlier history without allowing the funky shorten+prefix behavior.
	# So we read back the svn-remote.svn.fetch configuration and compute
	# the prefix. This way we are sure to get the correct prefix.
	gitsvnurl="$(git config --get svn-remote.svn.url)" || :
	gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | tail -1)" || :
	gitsvnprefix="${gitsvnfetch%%:*}"
	gitsvnsuffix="${gitsvnprefix##*/}"
	gitsvnprefix="${gitsvnprefix%$gitsvnsuffix}"
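	# (Worked example with hypothetical values: if git-svn shortened the URL
	# and left svn-remote.svn.fetch set to
	#	public/trunk:refs/remotes/git-svn/trunk
	# then gitsvnfetch is that whole value, gitsvnprefix is first
	# "public/trunk", gitsvnsuffix becomes "trunk", and the final gitsvnprefix
	# is "public/", which is exactly the prefix glued back onto
	# trunk/branches/tags below.)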
	# Ask git-svn to store everything in the normal non-remote
	# locations being careful to use the correct prefix
	git config --replace-all svn-remote.svn.fetch "${gitsvnprefix}trunk:refs/heads/master"
	git config --replace-all svn-remote.svn.branches "${gitsvnprefix}branches/*:refs/heads/*"
	git config --replace-all svn-remote.svn.tags "${gitsvnprefix}tags/*:refs/tags/*"
	# look for additional non-standard directories to fetch
	# check for standard layout at the same time
	svn --non-interactive --username "$svnuser" --password "$svnpass" ls "$gitsvnurl/${gitsvnprefix}" 2>/dev/null |
	{ while read file; do case $file in
		# skip the already-handled standard ones and any with a space or tab
		trunk/|branches/|tags/) foundstd=1;;
		# only fetch extra directories from the $svnurl root (not any files)
		*?/) git config --add svn-remote.svn.fetch \
			"${gitsvnprefix}${file%/}:refs/heads/${file%/}";;
	# if files found and no standard directories present use a simpler layout
	if [ -z "$foundstd" ] && [ -n "$foundfile" ]; then
		git config --unset svn-remote.svn.branches
		git config --unset svn-remote.svn.tags
		git config --replace-all svn-remote.svn.fetch ':refs/heads/master'
	# git svn fetch on a very large repo can take some time and the
	# remote server may interrupt the connection from time to time.
	# keep retrying (after a brief pause) as long as we are making progress.
	# however, we do limit the total number of retries to 1000
	# we will, however, retry up to $svn_backoff_count times even if we're not making progress
	v_get_svn_progress_fingerprint() {
		eval "$1="'"$({ GIT_DIR=. git svn info <"$mtlinesfile" 2>&1; git show-ref --head 2>&1; } |
			git hash-object -t blob --stdin )"' || :
	}
	svn_ret_err() { return "${1:-1}"; }
	svn_retries=1000 # maximum possible fetch attempts no matter what
	svn_retry_backoff_start_half=60 # min retry wait is double this amount in seconds
	svn_backoff_count=7 # max retry wait is $svn_retry_backoff_start_half * 2^$svn_backoff_count
	# Cumulative backoff wait before giving up on consecutive no-progress retries
	# is approximately 2 * $svn_retry_backoff_start_half * 2^$svn_backoff_count
	# For a $svn_backoff_count of 7 that works out to be exactly 4h14m
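	# (With the defaults above the successive no-progress waits are
	# 2*60=120s, 240s, 480s, 960s, 1920s, 3840s and 7680s; their sum is
	# 120+240+480+960+1920+3840+7680 = 15240s, i.e. the 4h14m quoted above.)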
	v_get_svn_progress_fingerprint svn_progress
	svn_progress_retries="$svn_retries"
	svn_retry_backoff="$svn_retry_backoff_start_half"
	while [ "$svn_retries" -gt 0 ]; do
		svn_retries="$(( $svn_retries - 1 ))"
		GIROCCO_DIVERT_GIT_SVN_AUTO_GC=1
		export GIROCCO_DIVERT_GIT_SVN_AUTO_GC
		unset GIROCCO_SUPPRESS_AUTO_GC_UPDATE
		saveconfig="$GIT_CONFIG_PARAMETERS"
		git_add_config 'gc.auto=1'
		git_add_config 'gc.autoPackLimit=1'
		# Again, be careful to use GIT_DIR=. here or else new .git subdirectory!
		GIT_DIR=. git_ulimit svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet <"$mtlinesfile" || svn_err="$?"
		GIROCCO_SUPPRESS_AUTO_GC_UPDATE=1
		export GIROCCO_SUPPRESS_AUTO_GC_UPDATE
		unset GIROCCO_DIVERT_GIT_SVN_AUTO_GC
		unset GIT_CONFIG_PARAMETERS
		[ -z "$saveconfig" ] || {
			GIT_CONFIG_PARAMETERS="$saveconfig"
			export GIT_CONFIG_PARAMETERS
		[ "${svn_err:-1}" -ne 0 ] || break # success!
		# Check to see if we made any progress
		v_get_svn_progress_fingerprint svn_progress_now
		if [ "$svn_progress_now" != "$svn_progress" ]; then
			# we made progress, continue the loop with min wait
			svn_progress="$svn_progress_now"
			svn_progress_retries="$svn_retries"
			svn_retry_backoff="$svn_retry_backoff_start_half"
			# no progress, but we only give up after
			# $svn_backoff_count no-progress attempts in a row
			[ "$(( $svn_progress_retries - $svn_retries ))" -lt "$svn_backoff_count" ] ||
			# continue but only after twice the previous wait
			# (which will still be the min wait if this is the
			# first no-progress retry after making some progress)
			svn_retry_backoff="$(( 2 * $svn_retry_backoff ))"
		# Pause for $svn_retry_backoff seconds before retrying to be friendly to the server
		# Use that time to pack up loose objects if there are "lotsa" them
		if ! lotsa_loose_objects_or_sopacks; then
			echo "Pausing for $svn_retry_backoff seconds before retrying ($(date))"
			sleep "$svn_retry_backoff"
			pausestop="$(( $(date '+%s') + $svn_retry_backoff ))"
			echo "Pausing and packing loose objects for $svn_retry_backoff seconds before retrying ($(date))"
			pack_incremental_loose_objects_if_lockable ||
				echo "Packing skipped (only pausing): $lockerr"
			timenow="$(date '+%s')"
			if [ "$timenow" -lt "$pausestop" ]; then
				sleepamt="$(( $pausestop - $timenow ))"
				[ "$sleepamt" -le "$svn_retry_backoff" ] ||
					sleepamt="$svn_retry_backoff" # paranoia check
		cleanup_git_svn_leftovers
		echo "Retrying fetch ($(date))"
	[ "${svn_err:-1}" -eq 0 ] || svn_ret_err "$svn_err"
	test ${svn_err:-1} -eq 0
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
		LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	unset GIT_ASKPASS_PASSWORD
darcs://* | darcs+http://* | darcs+https://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 1; }
	darcs://*) darcsurl="http://${url#darcs://}";;
	*) darcsurl="${url#darcs+}";;
	git_darcs_fetch "$darcsurl"

	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 1; }
	# we just remove bzr:// here, a typical bzr url is just
	bzrurl="${url#bzr://}"
	git_bzr_fetch "$bzrurl"
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 1; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	# Perform the initial hg clone
	hg clone -U "$hgurl" "$(pwd)/repo.hg"
	# Do the fast-export | fast-import
	# We manually add remote.origin.url and remote.origin.fetch
	# to simulate a `git remote add --mirror=fetch` since that's
	# not available until Git 1.7.5 and this way we guarantee we
	# always get exactly the intended configuration and nothing else.
	git config remote.origin.url "$url"
	if ! is_gfi_mirror_url "$url" && [ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" = "true" ]; then
		git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
		git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
		git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
		git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
		git config --bool girocco.lastupdateclean true
		git config --replace-all remote.origin.fetch "+refs/*:refs/*"
		git config --bool girocco.lastupdateclean false
	# Set the correct HEAD symref by using ls-remote first
	GIT_SSL_NO_VERIFY=1 GIT_TRACE_PACKET=1 git ls-remote origin >.refs-temp 2>.pkts-temp ||
	# Since everything was redirected, on failure there'd be no output,
	# so let's make some failure output
	echo "git ls-remote \"$url\" failed"
	# Compensate for git() {} side effects
	unset GIT_TRACE_PACKET
	# If the server is running at least Git 1.8.4.3 then it will send us the actual
	# symref for HEAD. If we are running at least Git 1.7.5 then we can snarf that
	# out of the packet trace data.
	if [ -s .refs-temp ]; then
		# Nothing to do unless the remote repository has at least 1 ref
		# See if we got a HEAD ref
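		# (.refs-temp holds plain `git ls-remote` output, one
		# "<hash><TAB><refname>" line per advertised ref, e.g. hypothetically:
		#	1234abcd...	HEAD
		#	1234abcd...	refs/heads/master
		# so the grep just below picks out the line whose refname is exactly
		# HEAD and $head ends up holding that object hash, or stays empty if
		# no HEAD was advertised.)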
		head="$(LC_ALL=C grep -E "^$octet20$hexdig*[ $tab]+HEAD\$" <.refs-temp | LC_ALL=C awk '{print $1}')"
		# If the remote has HEAD set to a symbolic ref that does not exist
		# then we will not receive a HEAD ref in the ls-remote output
		if [ -n "$head" ]; then
			symrefcap="$(LC_ALL=C sed -ne <.pkts-temp \
				"/packet:.*git<.*[ $tab]symref="'HEAD:refs\/heads\/'"[^ $tab]/\
				{s/^.*[ $tab]symref="'HEAD:\(refs\/heads\/'"[^ $tab][^ $tab]*"'\).*$/\1/;p;}')"
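			# (With GIT_TRACE_PACKET set, .pkts-temp contains trace lines
			# roughly of this hypothetical form:
			#	... packet: git< <hash> HEAD\0 ... symref=HEAD:refs/heads/master agent=git/2.x ...
			# and the sed expression above plucks out the refs/heads/... part,
			# i.e. the branch the remote's HEAD actually points at.)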
			# prefer $symrefcap (refs/heads/master if no $symrefcap) if it
			# matches HEAD otherwise take the first refs/heads/... match
			[ -n "$ref" ] || continue
			matchcnt=$(( $matchcnt + 1 ))
			if [ -z "$headref" ] || [ "$ref" = "${symrefcap:-refs/heads/master}" ]; then
			if [ "$headref" = "${symrefcap:-refs/heads/master}" ] && [ $matchcnt -gt 1 ]; then
			$(LC_ALL=C grep -E "^$head[ $tab]+refs/heads/[^ $tab]+\$" <.refs-temp |
				LC_ALL=C awk '{print $2}')
			# Warn if there was more than one match and $symrefcap is empty
			# or $symrefcap is not the same as $headref since our choice might
			# differ from the source repository's HEAD
			if [ $matchcnt -ge 1 ] && [ "$symrefcap" != "$headref" ] &&
				{ [ -n "$symrefcap" ] || [ $matchcnt -gt 1 ]; }; then
		if [ -z "$headref" ]; then
			# If we still don't have a HEAD ref then prefer refs/heads/master
			# if it exists otherwise take the first refs/heads/...
			# We do not support having a detached HEAD.
			# We always warn now because we will be setting HEAD differently
			# than the source repository had HEAD set
			[ -n "$ref" ] || continue
			if [ -z "$headref" ] || [ "$ref" = "refs/heads/master" ]; then
			[ "$headref" != "refs/heads/master" ] || break
			$(LC_ALL=C grep -E "^$octet20$hexdig*[ $tab]+refs/heads/[^ $tab]+\$" <.refs-temp |
				LC_ALL=C awk '{print $2}')
		# If we STILL do not have a HEAD ref (perhaps the source repository
		# contains only tags) then use refs/heads/master. It will be invalid
		# but is no worse than we used to do by default and we'll warn about
		# it. We do not support a HEAD symref to anything other than refs/heads/...
		[ -n "$headref" ] || headref="refs/heads/master"
		git symbolic-ref HEAD "$headref"

	[ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && ! [ -e .gfipack ]; then

	GIT_SSL_NO_VERIFY=1 git_ulimit remote update $pruneopt
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs

	git symbolic-ref HEAD "refs/heads/master"

	rm -f .refs-temp .pkts-temp
# Check the max_clone_objects setting now (if set)
if [ "${cfg_max_clone_objects:-0}" != "0" ]; then
	objcount="$(git count-objects -v | LC_ALL=C awk 'BEGIN{v=0}/^count:/||/^in-pack:/{v+=$2}END{print v}')" || :
	if [ -n "$objcount" ] && [ "$objcount" -gt "$cfg_max_clone_objects" ]; then
		exit_objs="$objcount"
		exit 1 # fail the clone
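	# (Sample `git count-objects -v` output with hypothetical numbers:
	#	count: 12
	#	size: 48
	#	in-pack: 3456
	#	packs: 1
	# The awk above sums the "count:" and "in-pack:" values, giving
	# objcount=3468 here; the clone is failed only when that total exceeds
	# $cfg_max_clone_objects.)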
# The objects subdirectories' permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

# We may have just cloned a lot of refs and they will all be
# individual files at this point. Let's pack them now so we
# can have better performance right from the start.
# Initialize gitweb.lastreceive, gitweb.lastchange and info/lastactivity
git config gitweb.lastreceive "$(date '+%a, %d %b %Y %T %z')"
git config gitweb.lastchange "$(date '+%a, %d %b %Y %T %z')"
git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
	--count=1 refs/heads >info/lastactivity || :
! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
# Don't leave a multi-megabyte useless FETCH_HEAD behind

# Last-ditch attempt to get a valid HEAD for a non-git source
check_and_set_head || :
echo "Final touches..."
git update-server-info

# run gc now unless the clone is empty
if [ -z "$warnempty" ]; then
	git config --unset gitweb.lastgc 2>/dev/null || :
	rm -f .delaygc .allowgc

[ -z "$warnempty" ] ||
WARNING: You have mirrored an empty repository.

[ -z "$showheadwarn" ] || [ -z "$headref" ] ||
NOTE: HEAD has been set to a symbolic ref to \"$headref\".
Use the \"Project settings\" link to choose a different HEAD symref.

NOTE: Since this is a mirror of a non-Git source, the initial repository
size may be somewhat larger than necessary. This will be corrected
shortly. If you intend to clone this repository you may want to
wait up to 1 hour before doing so in order to receive the more
[ -z "$mailaddrs" ] ||
mailref "clone@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj clone completed" "$mailaddrs" <<EOT || :
Congratulations! The clone of project $proj just completed.
* GitWeb interface: $cfg_gitweburl/$proj.git
* Project settings: $cfg_webadmurl/editproj.cgi?name=$(echo "$proj" | LC_ALL=C sed -e 's/[+]/%2B/g')
$emptynote$headnote$sizenote
echo "Mirroring finished successfully!"
# In case this is a re-mirror, lastgc could have been set already so clear it now
git config --unset gitweb.lastgc || :
rm .clone_in_progress
echo "$sizenote@OVER@"