diff --git a/.gitignore b/.gitignore index 6c9e3630..11549e10 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,8 @@ aclocal.m4 autom4te.cache/ bin/ obj/ +/bin/ +SparkleShare/Mac/bin install-sh libtool ltmain.sh diff --git a/Makefile.am b/Makefile.am index 1ca9d1da..b4293c03 100755 --- a/Makefile.am +++ b/Makefile.am @@ -1,4 +1,4 @@ -basedirs = build help SmartIrc4net SparkleLib data po +basedirs = build help SparkleLib data po SUBDIRS = $(basedirs) $(GUISUBDIRS) DIST_SUBDIRS = $(basedirs) SparkleShare diff --git a/NEWS b/NEWS index 6b8eba0b..3adfa0d2 100755 --- a/NEWS +++ b/NEWS @@ -1,3 +1,25 @@ +0.6.0 for Linux and Mac (Sun Dec 25 2011): + + Hylke: + - Several fixes for annoying bugs and crashes + - Fix freeze on quit on Mac + - Show project and history size in the event log + + +0.4.2 for Linux and Mac (Fri Dec 2 2011): + Hylke: Fix crash trying to add a project + + +0.4.1 for Linux and Mac (Tue Nov 29 2011): + Hylke: Just some small tweaks and fixes: + + - Bundle git with the Mac application + - Warn about potential global gitignore files + - Remove SmartIrc4Net + - Build system fixes + - Code cleanups + + 0.4.0 for Linux and Mac (Sun Nov 12 2011): Hylke: It has been a while since the last release. Since so many things changed, and it being (softly) incompatible with 0.2, I decided diff --git a/README b/README deleted file mode 100644 index 8dfd1a40..00000000 --- a/README +++ /dev/null @@ -1,145 +0,0 @@ -# SparkleShare - -SparkleShare is a collaboration and sharing tool that is designed to keep -things simple and to stay out of your way. It allows you to instantly sync -with any Git repository you have access to. - -SparkleShare currently works on Linux and Mac. A Windows port and mobile -device support are planned for the future. - - -# License - -SparkleShare is free software and licensed under the GNU GPLv3 or later. You -are welcome to change and redistribute it under certain conditions. For more -information see the LICENSE file or visit http://www.gnu.org/licenses/gpl-3.0.html - - -# Run on Linux - -Requirements: - - - git >= 1.7.0 - - gtk-sharp2 - - gvfs - - intltool - - libnotify - - mono-core >= 2.8 - - notify-sharp - - nautilus-python - - openssh - - pygtk - - webkitgtk - - webkit-sharp - -Run the service, either click the SparkleShare launcher or: - - $ sparkleshare start - -You can stop the service via the graphical interface or by typing: - - $ sparkleshare stop - -For help: - - $ sparkleshare --help - -Note: - - SparkleShare creates its own RSA keypair in ~/config/sparkleshare/ and uses - that for authentication. Please mind this if you're planning to set up your - own server by hand. 
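The note above says SparkleShare keeps its own RSA keypair in its per-user configuration folder and uses it for authentication. As a reading aid, here is a minimal, hypothetical sketch of how that location is resolved on Mono/.NET; the folder name and the key-file naming pattern are taken from the SparkleConfig/SparklePaths hunks further down in this diff, and the e-mail address is a placeholder.

```csharp
using System;
using System.IO;

// Hypothetical illustration only: resolve the per-user config folder the note
// above refers to. SpecialFolder.ApplicationData maps to ~/.config on Linux.
class ConfigLocation
{
    static void Main ()
    {
        string config_path = Path.Combine (
            Environment.GetFolderPath (Environment.SpecialFolder.ApplicationData),
            "sparkleshare");

        // The key pair lives next to config.xml and is named after the
        // configured e-mail address (pattern taken from ConfigureSSH () below).
        string key_path = Path.Combine (config_path, "sparkleshare.user@example.org.key");

        Console.WriteLine (config_path);
        Console.WriteLine (key_path);
    }
}
```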
- - -# Build on Linux - -Installing the build dependencies on Debian or Ubuntu: - - $ sudo apt-get install gtk-sharp2 mono-runtime mono-devel monodevelop \ - libndesk-dbus1.0-cil-dev nant libnotify-cil-dev libgtk2.0-cil-dev mono-gmcs \ - libwebkit-cil-dev intltool libtool python-nautilus libndesk-dbus-glib1.0-cil-dev - -For Ubuntu libappindicator support, run the following before building: - - $ sudo apt-get install libappindicator0.1-cil-dev - -On Fedora: - - $ sudo yum install gtk-sharp2-devel mono-core mono-devel monodevelop \ - ndesk-dbus-devel ndesk-dbus-glib-devel nautilus-python-devel nant \ - notify-sharp-devel webkit-sharp-devel webkitgtk-devel libtool intltool \ - gnome-doc-utils - -You can build and install SparkleShare like this: - - $ ./configure --prefix=/usr (or ./autogen.sh if you build from the repository) - $ make - $ sudo make install - -Note: - - Use '--prefix=/usr' if you want the Nautilus extension to work. - - -# Run on Mac - -Just double-click the SparkleShare bundle. - - -# Build on Mac - -Install Xcode, the Mono Framework, MonoDevelop and the MonoMac plugin (you can find it in MonoDevelop => Add-in Manager). - -You may need to adjust some environment variables to let the build environment tools find mono: - - $ export PATH=/Library/Frameworks/Mono.framework/Versions/Current/bin:$PATH - $ export PKG_CONFIG=/Library/Frameworks/Mono.framework/Versions/Current/bin/pkg-config - $ export PKG_CONFIG_PATH=/Library/Frameworks/Mono.framework/Versions/Current/lib/pkgconfig - -Install git, automake, and intltool using MacPorts: - - $ sudo port install git-core automake intltool - -Start the first part of the build: - - $ ./autogen.sh --enable-gtkui=no - $ make - -Now that you have compiled the libraries, open 'SparkleShare/Mac/SparkleShare.sln' in -MonoDevelop and start the build. - -To create the SparkleShare.app, make sure the project is focused and select Project from the menu bar -and click "Create Mac Installer...". Make sure to select "Don't link assemblies". - -Save the SparkleShare.app somewhere. Paste the contents of -the following file in SparkleShare.app/Contents/MonoBundle/config: -https://raw.github.com/gist/1aeffa61bac73fc08eca/0c0f09ef9e36864c35f34fd5e8bf4f99886be193/gistfile1.txt - -Copy /Library/Frameworks/Mono.framework/Versions/Current/lib/libintl.dylib -to SparkleShare.app/Contents/Resources - -Now you should have a working bundle that you can run. - - -# Info - -Official website: -http://www.sparkleshare.org/ - -Source code: -http://github.com/SparkleShare/ - -IRC Channel: -#sparkleshare on irc.gnome.org - -Wiki: -http://github.com/hbons/SparkleShare/wiki/ - -Report issues: -http://github.com/hbons/SparkleShare/issues/ - -Translation project: -http://www.transifex.net/projects/p/sparkleshare/ - - -Now have fun and create cool things together! :) diff --git a/README.md b/README.md new file mode 100644 index 00000000..1f35cbd5 --- /dev/null +++ b/README.md @@ -0,0 +1,162 @@ +# SparkleShare + +SparkleShare is a collaboration and sharing tool that is designed to keep +things simple and to stay out of your way. It allows you to instantly sync +with any Git repository you have access to. + +SparkleShare currently works on Linux and Mac. A Windows port and mobile +device support are planned for the future. + +[![Flattr this git repo](http://api.flattr.com/button/flattr-badge-large.png)](https://flattr.com/thing/21770/SparkleShare-Sharing-work-made-easy) + + +## License + +SparkleShare is free software and licensed under the GNU GPLv3 or later. 
You +are welcome to change and redistribute it under certain conditions. For more +information see the LICENSE file or visit http://www.gnu.org/licenses/gpl-3.0.html + + +## Run on Linux + +Requirements: + + - git >= 1.7.0 + - gtk-sharp2 + - gvfs + - intltool + - libnotify + - mono-core >= 2.8 + - notify-sharp + - nautilus-python + - openssh + - pygtk + - webkitgtk + - webkit-sharp + +Run the service, either click the SparkleShare launcher or: + +```bash +$ sparkleshare start +``` + +You can stop the service via the graphical interface or by typing: + +```bash +$ sparkleshare stop +``` + +For help: + +```bash +$ sparkleshare --help +``` + +**Note:** + + SparkleShare creates its own RSA keypair in `~/config/sparkleshare/` and uses + that for authentication. Please mind this if you're planning to set up your + own server by hand. + + +## Build on Linux + +### Install build dependencies + +#### Debian or Ubuntu (apt): + +```bash +$ sudo apt-get install gtk-sharp2 mono-runtime mono-devel monodevelop \ + libndesk-dbus1.0-cil-dev nant libnotify-cil-dev libgtk2.0-cil-dev mono-mcs mono-gmcs \ + libwebkit-cil-dev intltool libtool python-nautilus libndesk-dbus-glib1.0-cil-dev +``` + +#### Fedora (yum): + +```bash +$ sudo yum install gtk-sharp2-devel mono-core mono-devel monodevelop \ + ndesk-dbus-devel ndesk-dbus-glib-devel nautilus-python-devel nant \ + notify-sharp-devel webkit-sharp-devel webkitgtk-devel libtool intltool \ + gnome-doc-utils +``` + +For Ubuntu `libappindicator` support, install the following package: + +```bash +$ sudo apt-get install libappindicator0.1-cil-dev +``` + +You can then build and install SparkleShare like this: + +```bash +$ ./configure --prefix=/usr (or ./autogen.sh if you build from the repository) +$ make +$ sudo make install +``` + +**Note:** Use `--prefix=/usr` if you want the Nautilus extension to work. + + +## Run on Mac + +Just double-click the SparkleShare bundle. + + +## Build on Mac + +Install Xcode, the Mono Framework, MonoDevelop and the MonoMac plugin +(you can find it in MonoDevelop => Add-in Manager). + +You may need to adjust some environment variables to let the build environment tools find mono: + +```bash +$ export PATH=/Library/Frameworks/Mono.framework/Versions/Current/bin:$PATH +$ export PKG_CONFIG=/Library/Frameworks/Mono.framework/Versions/Current/bin/pkg-config +$ export PKG_CONFIG_PATH=/Library/Frameworks/Mono.framework/Versions/Current/lib/pkgconfig +``` + +Install git, automake, and intltool using MacPorts: + +```bash +$ sudo port install git-core automake intltool +``` + +Start the first part of the build: + +```bash +$ ./autogen.sh --enable-gtkui=no +$ make +``` + +Now that you have compiled the libraries, open `SparkleShare/Mac/SparkleShare.sln` in +MonoDevelop and start the build. + +To create the SparkleShare.app, make sure the project is focused and select Project from the menu bar +and click "Create Mac Installer...". Make sure to select "Don't link assemblies". + +Save the SparkleShare.app somewhere. Paste the contents of +the following file in `SparkleShare.app/Contents/MonoBundle/config`: + +``` +https://raw.github.com/gist/1aeffa61bac73fc08eca/0c0f09ef9e36864c35f34fd5e8bf4f99886be193/gistfile1.txt +``` + +Copy `/Library/Frameworks/Mono.framework/Versions/Current/lib/libintl.dylib` to `SparkleShare.app/Contents/Resources` + + +Now you should have a working bundle that you can run. 
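Several hunks below (SparkleFetcherGit.Fetch, SparkleRepoGit.SyncUp and SyncDown) add the same pattern: run git with `--progress`, redirect stderr, pull percentages out with a regex, count the "Compressing objects" stage as the first 20% and the "Receiving/Writing objects" stage as the remaining 80%, and only report progress once per second. A condensed, stand-alone sketch of that pattern follows; the class name and the console output are placeholders, not code from the diff.

```csharp
using System;
using System.Diagnostics;
using System.Text.RegularExpressions;

// Condensed sketch of the progress handling added in the hunks below.
// Stand-alone illustration; assumes it runs inside a git repository.
class GitProgressSketch
{
    static void Main ()
    {
        Process git = new Process ();
        git.StartInfo.FileName              = "git";
        git.StartInfo.Arguments             = "fetch --progress"; // progress stats go to stderr
        git.StartInfo.UseShellExecute       = false;
        git.StartInfo.RedirectStandardError = true;
        git.Start ();

        double percentage        = 1.0;
        Regex progress_regex     = new Regex (@"([0-9]+)%", RegexOptions.Compiled);
        DateTime last_change     = DateTime.Now;
        TimeSpan change_interval = new TimeSpan (0, 0, 0, 1); // at most one update per second

        while (!git.StandardError.EndOfStream) {
            string line = git.StandardError.ReadLine ();
            Match match = progress_regex.Match (line);

            if (!match.Success)
                continue;

            double number = double.Parse (match.Groups [1].Value);

            // "Compressing objects" counts as the first 20%,
            // "Receiving/Writing objects" as the remaining 80%
            if (line.StartsWith ("Compressing"))
                number = number / 100 * 20;
            else
                number = number / 100 * 80 + 20;

            if (number >= percentage) {
                percentage = number;

                // Throttle: only report when at least change_interval has passed
                if (DateTime.Compare (last_change, DateTime.Now.Subtract (change_interval)) < 0) {
                    Console.WriteLine ("{0:0}%", percentage);
                    last_change = DateTime.Now;
                }
            }
        }

        git.WaitForExit ();
    }
}
```

The one-second gate is what replaces the old `Thread.Sleep (100)` polling loop that this diff removes from the fetcher.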
+ + +## Info + +||| +|-----------------------------------:|:--------------------------| +| **Official website**: | http://www.sparkleshare.org/ | +| **Source code**: | http://github.com/SparkleShare/ | +| **IRC Channel**: | #sparkleshare on irc.gnome.org | +| **Wiki**: | http://github.com/hbons/SparkleShare/wiki/ | +| **Report issues**: | http://github.com/hbons/SparkleShare/issues/ | +| **Translation project**: | http://www.transifex.net/projects/p/sparkleshare/ | + + +Now have fun and create cool things together! :) diff --git a/SmartIrc4net b/SmartIrc4net deleted file mode 160000 index 036e2233..00000000 --- a/SmartIrc4net +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 036e2233aea9bd3b4a30f8c5136daff0187eb5ec diff --git a/SparkleLib/Git/SparkleFetcherGit.cs b/SparkleLib/Git/SparkleFetcherGit.cs index d7086d7c..5b0e0098 100755 --- a/SparkleLib/Git/SparkleFetcherGit.cs +++ b/SparkleLib/Git/SparkleFetcherGit.cs @@ -90,10 +90,13 @@ namespace SparkleLib { this.git.StartInfo.RedirectStandardError = true; this.git.Start (); - + double percentage = 1.0; Regex progress_regex = new Regex (@"([0-9]+)%", RegexOptions.Compiled); - + + DateTime last_change = DateTime.Now; + TimeSpan change_interval = new TimeSpan (0, 0, 0, 1); + while (!this.git.StandardError.EndOfStream) { string line = this.git.StandardError.ReadLine (); Match match = progress_regex.Match (line); @@ -107,7 +110,7 @@ namespace SparkleLib { // the "Receiving objects" stage which we count as the last 80% if (line.Contains ("|")) // "Receiving objects" stage - number = (number / 100 * 75 + 20); + number = (number / 100 * 80 + 20); else // "Compressing objects" stage number = (number / 100 * 20); @@ -115,20 +118,21 @@ namespace SparkleLib { if (number >= percentage) { percentage = number; - - // FIXME: for some reason it doesn't go above 95% - base.OnProgressChanged (percentage); + + if (DateTime.Compare (last_change, DateTime.Now.Subtract (change_interval)) < 0) { + base.OnProgressChanged (percentage); + last_change = DateTime.Now; + } } - - System.Threading.Thread.Sleep (100); } this.git.WaitForExit (); - SparkleHelpers.DebugInfo ("Git", "Exit code " + this.git.ExitCode.ToString ()); + if (this.git.ExitCode != 0) { return false; + } else { InstallConfiguration (); InstallExcludeRules (); @@ -137,9 +141,33 @@ namespace SparkleLib { } + public override string [] Warnings { + get { + SparkleGit git = new SparkleGit (SparkleConfig.DefaultConfig.TmpPath, + "config --global core.excludesfile"); + + git.Start (); + + // Reading the standard output HAS to go before + // WaitForExit, or it will hang forever on output > 4096 bytes + string output = git.StandardOutput.ReadToEnd ().Trim (); + git.WaitForExit (); + + if (string.IsNullOrEmpty (output)) { + return null; + + } else { + return new string [] { + string.Format ("You seem to have configured a system ‘gitignore’ file. 
" + + "This may interfere with SparkleShare.\n({0})", output) + }; + } + } + } + public override void Stop () { - if (this.git != null) { + if (this.git != null && !this.git.HasExited) { this.git.Kill (); this.git.Dispose (); } @@ -179,22 +207,11 @@ namespace SparkleLib { // Add a .gitignore file to the repo private void InstallExcludeRules () { - string exclude_rules_file_path = SparkleHelpers.CombineMore ( - this.target_folder, ".git", "info", "exclude"); + DirectoryInfo info = Directory.CreateDirectory (SparkleHelpers.CombineMore ( + this.target_folder, ".git", "info")); - string directory = Path.GetDirectoryName(exclude_rules_file_path); - - if (directory == null) - { - return; - } - - if (!Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - TextWriter writer = new StreamWriter (exclude_rules_file_path); + string exlude_rules_file_path = Path.Combine (info.FullName, "exclude"); + TextWriter writer = new StreamWriter (exlude_rules_file_path); // gedit and emacs writer.WriteLine ("*~"); @@ -226,7 +243,7 @@ namespace SparkleLib { writer.WriteLine ("Thumbs.db"); writer.WriteLine ("Desktop.ini"); - // MS Office + // MS Office writer.WriteLine ("~*.tmp"); writer.WriteLine ("~*.TMP"); writer.WriteLine ("*~*.tmp"); @@ -257,6 +274,7 @@ namespace SparkleLib { } } + public class SparkleGit : Process { public SparkleGit (string path, string args) : base () diff --git a/SparkleLib/Git/SparkleRepoGit.cs b/SparkleLib/Git/SparkleRepoGit.cs index c00d84bb..e2eda251 100644 --- a/SparkleLib/Git/SparkleRepoGit.cs +++ b/SparkleLib/Git/SparkleRepoGit.cs @@ -26,15 +26,8 @@ namespace SparkleLib { public class SparkleRepoGit : SparkleRepoBase { - private string exlude_rules_file_path; - private string ExclusionBlock = "#Temporary Exclusions"; - public SparkleRepoGit (string path, SparkleBackend backend) : - base (path, backend) { - // Set exclude file path - exlude_rules_file_path = SparkleHelpers.CombineMore ( - LocalPath, ".git", "info", "exclude"); - } + base (path, backend) { } private string identifier = null; @@ -66,6 +59,34 @@ namespace SparkleLib { } + public override List ExcludePaths { + get { + List rules = new List (); + rules.Add (Path.DirectorySeparatorChar + ".git"); + + return rules; + } + } + + + public override double Size { + get { + return CalculateSize ( + new DirectoryInfo (LocalPath) + ); + } + } + + + public override double HistorySize { + get { + return CalculateSize ( + new DirectoryInfo (Path.Combine (LocalPath, ".git")) + ); + } + } + + public override string [] UnsyncedFilePaths { get { List file_paths = new List (); @@ -144,16 +165,73 @@ namespace SparkleLib { public override bool SyncUp () { - Add (); + if (AnyDifferences) { + Add (); - string message = FormatCommitMessage (); - Commit (message); + string message = FormatCommitMessage (); + Commit (message); + } - SparkleGit git = new SparkleGit (LocalPath, "push origin master"); + + SparkleGit git = new SparkleGit (LocalPath, + "push --progress " + // Redirects progress stats to standarderror + "origin master"); + + git.StartInfo.RedirectStandardError = true; git.Start (); - git.StandardOutput.ReadToEnd (); + + double percentage = 1.0; + Regex progress_regex = new Regex (@"([0-9]+)%", RegexOptions.Compiled); + + DateTime last_change = DateTime.Now; + TimeSpan change_interval = new TimeSpan (0, 0, 0, 1); + + while (!git.StandardError.EndOfStream) { + string line = git.StandardError.ReadLine (); + Match match = progress_regex.Match (line); + string speed = ""; + double number = 0.0; + + if 
(match.Success) { + number = double.Parse (match.Groups [1].Value); + + // The pushing progress consists of two stages: the "Compressing + // objects" stage which we count as 20% of the total progress, and + // the "Writing objects" stage which we count as the last 80% + if (line.StartsWith ("Compressing")) { + // "Compressing objects" stage + number = (number / 100 * 20); + + } else { + // "Writing objects" stage + number = (number / 100 * 80 + 20); + + if (line.Contains ("|")) { + speed = line.Substring (line.IndexOf ("|") + 1).Trim (); + speed = speed.Replace (", done.", "").Trim (); + speed = speed.Replace ("i", ""); + speed = speed.Replace ("KB/s", "ᴋʙ/s"); + speed = speed.Replace ("MB/s", "ᴍʙ/s"); + } + } + } + + if (number >= percentage) { + percentage = number; + + if (percentage == 100.0) + percentage = 99.0; + + if (DateTime.Compare (last_change, DateTime.Now.Subtract (change_interval)) < 0) { + base.OnSyncProgressChanged (percentage, speed); + last_change = DateTime.Now; + } + } + } + git.WaitForExit (); + if (git.ExitCode == 0) return true; else @@ -163,10 +241,63 @@ namespace SparkleLib { public override bool SyncDown () { - SparkleGit git = new SparkleGit (LocalPath, "fetch -v"); + SparkleGit git = new SparkleGit (LocalPath, "fetch --progress"); + + git.StartInfo.RedirectStandardError = true; git.Start (); + + double percentage = 1.0; + Regex progress_regex = new Regex (@"([0-9]+)%", RegexOptions.Compiled); + + DateTime last_change = DateTime.Now; + TimeSpan change_interval = new TimeSpan (0, 0, 0, 1); + + while (!git.StandardError.EndOfStream) { + string line = git.StandardError.ReadLine (); + Match match = progress_regex.Match (line); + string speed = ""; + double number = 0.0; + + if (match.Success) { + number = double.Parse (match.Groups [1].Value); + + // The fetching progress consists of two stages: the "Compressing + // objects" stage which we count as 20% of the total progress, and + // the "Receiving objects" stage which we count as the last 80% + if (line.StartsWith ("Compressing")) { + // "Compressing objects" stage + number = (number / 100 * 20); + + } else { + // "Writing objects" stage + number = (number / 100 * 80 + 20); + + if (line.Contains ("|")) { + speed = line.Substring (line.IndexOf ("|") + 1).Trim (); + speed = speed.Replace (", done.", "").Trim (); + speed = speed.Replace ("i", ""); + speed = speed.Replace ("KB/s", "ᴋʙ/s"); + speed = speed.Replace ("MB/s", "ᴍʙ/s"); + } + } + } + + if (number >= percentage) { + percentage = number; + + if (percentage == 100.0) + percentage = 99.0; + + if (DateTime.Compare (last_change, DateTime.Now.Subtract (change_interval)) < 0) { + base.OnSyncProgressChanged (percentage, speed); + last_change = DateTime.Now; + } + } + } + git.WaitForExit (); + if (git.ExitCode == 0) { Rebase (); return true; @@ -216,6 +347,7 @@ namespace SparkleLib { if (value) { if (!File.Exists (unsynced_file_path)) File.Create (unsynced_file_path).Close (); + } else { File.Delete (unsynced_file_path); } @@ -233,177 +365,16 @@ namespace SparkleLib { SparkleHelpers.DebugInfo ("Git", "[" + Name + "] Changes staged"); } - // Add a new file to be ignored - public override bool AddExclusionRule (FileSystemEventArgs args) { - - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - exclusions = ReadExclusionRules(); - } - catch { - return false; - } - - // Look for the local exclusions section - bool added = false; - for(int i = 0; i < exclusions.Count; i++) { - string entry = exclusions[i]; - 
if(entry.Equals(ExclusionBlock)) { - // add a new exclusion rule containing a file path - exclusions.Insert(i + 1, RelativePath); - added = true; - break; - } - } - - /* - * For compability to existing repos: - * Add a "#Temporary Exclusions"-Block to the - * ignore file in order to recognize this - * exclude rules later on - */ - if(!added) { - exclusions.Add(ExclusionBlock); - exclusions.Add(RelativePath); - } - - // Write exceptions list back to file - return WriteExclusionRules(exclusions); - } - - // Check whether a specific rule exists in the exclusion file - public override bool ExclusionRuleExists(FileSystemEventArgs args) { - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - // Read rules from temporary block only - exclusions = ReadExclusionRules(true); - - foreach(string entry in exclusions) { - if(entry.Equals(RelativePath)) { - return true; - } - } - } catch { - SparkleHelpers.DebugInfo("Error", "Cannot determine whether an exclusion rule for " + - args.FullPath + " already exists or not."); - return false; - } - - return false; - } - - // Remove file from exclusion list when they are readable again - public override bool RemoveExclusionRule(FileSystemEventArgs args) { - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - exclusions = ReadExclusionRules(); - - /* - * Removing a rule should only apply to rules in the "Temporary Exclusion"-block. - * Therefore we first read until reaching the block and then remove the rule. - * - * We cannot use ReadExclusionRules(true) here since we write all lines back - * to the file. This would result in a crippled exclusion file. - */ - bool BlockReached = false; - foreach(string entry in exclusions) { - if(entry.Equals(ExclusionBlock)) { - BlockReached = true; - } - - // Remove this rule - if(BlockReached && entry.Equals(RelativePath)) { - exclusions.Remove(entry); - break; - } - } - - return WriteExclusionRules(exclusions); - } catch { - SparkleHelpers.DebugInfo("Error", "Unable to remove exclusion rule for entry " + RelativePath); - return false; - } - } - - // Reads the exclusion rules file into a string list - private List ReadExclusionRules() { - - List exclusions = new List(); - TextReader reader = new StreamReader (exlude_rules_file_path);; - - try { - while(reader.Peek() > -1) { - exclusions.Add(reader.ReadLine().TrimEnd()); - } - } - catch (IOException e) { - SparkleHelpers.DebugInfo("Error", "Reading from exclusion file failed: " + e.Message); - return new List(); - } - finally { - if(reader != null) { - reader.Close(); - } - } - - return exclusions; - } - - // Reads rules only from temporary exclusion block - private List ReadExclusionRules(bool TempOnly) { - if(TempOnly) { - bool ForceRead = false; - List exclusions = new List(); - foreach(string entry in ReadExclusionRules()) { - if(ForceRead || entry.Equals(ExclusionBlock)) { - exclusions.Add(entry); - ForceRead = true; - } - } - - return exclusions; - } - - return ReadExclusionRules(); - } - - // Writes the exclusion rules file with a given string list - private bool WriteExclusionRules(List lines) { - - TextWriter writer = new StreamWriter (exlude_rules_file_path); - - try { - foreach(string line in lines) { - writer.WriteLine(line.TrimEnd()); - } - } catch(IOException e) { - SparkleHelpers.DebugInfo("Error", "Writing into exclusion file failed: " + e.Message); - return false; - } - finally { - if(writer != null) { - writer.Close(); - } - } - - return true; - } // 
Removes unneeded objects - private void CollectGarbage () +/* private void CollectGarbage () { SparkleGit git = new SparkleGit (LocalPath, "gc"); git.Start (); git.WaitForExit (); SparkleHelpers.DebugInfo ("Git", "[" + Name + "] Garbage collected."); - } + } */ // Commits the made changes @@ -486,11 +457,6 @@ namespace SparkleLib { string output = git_status.StandardOutput.ReadToEnd ().TrimEnd (); git_status.WaitForExit (); - if (String.IsNullOrEmpty (output)) { - // no conflict any more. - return; - } - string [] lines = output.Split ("\n".ToCharArray ()); foreach (string line in lines) { @@ -571,13 +537,10 @@ namespace SparkleLib { List change_sets = new List (); - SparkleGit git_log = new SparkleGit (LocalPath, "log -" + count + " --raw -M --date=iso"); - if ((SparkleBackend.Platform == PlatformID.Unix || - SparkleBackend.Platform == PlatformID.MacOSX)) { - // this causes an IOException on windows - Console.OutputEncoding = System.Text.Encoding.Unicode; - } + // Console.InputEncoding = System.Text.Encoding.Unicode; + Console.OutputEncoding = System.Text.Encoding.Unicode; + SparkleGit git_log = new SparkleGit (LocalPath, "log -" + count + " --raw -M --date=iso"); git_log.Start (); // Reading the standard output HAS to go before @@ -709,8 +672,12 @@ namespace SparkleLib { FillEmptyDirectories (child_path); } - if (Directory.GetFiles (path).Length == 0 && !path.Equals (LocalPath)) + if (Directory.GetFiles (path).Length == 0 && + Directory.GetDirectories (path).Length == 0 && + !path.Equals (LocalPath)) { + File.Create (Path.Combine (path, ".empty")).Close (); + } } @@ -752,6 +719,10 @@ namespace SparkleLib { foreach (string added in Added) { file_name = added.Trim ("\"".ToCharArray ()); + + if (file_name.EndsWith (".empty")) + file_name = file_name.Substring (0, file_name.Length - 6); + message += "+ ‘" + file_name + "’" + n; count++; @@ -761,6 +732,10 @@ namespace SparkleLib { foreach (string modified in Modified) { file_name = modified.Trim ("\"".ToCharArray ()); + + if (file_name.EndsWith (".empty")) + file_name = file_name.Substring (0, file_name.Length - 6); + message += "/ ‘" + file_name + "’" + n; count++; @@ -770,6 +745,10 @@ namespace SparkleLib { foreach (string removed in Removed) { file_name = removed.Trim ("\"".ToCharArray ()); + + if (file_name.EndsWith (".empty")) + file_name = file_name.Substring (0, file_name.Length - 6); + message += "- ‘" + file_name + "’" + n; count++; @@ -796,5 +775,38 @@ namespace SparkleLib { base.CreateInitialChangeSet (); SyncUp (); } + + + // Recursively gets a folder's size in bytes + public override double CalculateSize (DirectoryInfo parent) + { + if (!Directory.Exists (parent.ToString ())) + return 0; + + double size = 0; + + // Ignore the temporary 'rebase-apply' and '.tmp' directories. This prevents potential + // crashes when files are being queried whilst the files have already been deleted. + if (parent.Name.Equals ("rebase-apply") || + parent.Name.Equals (".tmp")) + return 0; + + try { + foreach (FileInfo file in parent.GetFiles()) { + if (!file.Exists) + return 0; + + size += file.Length; + } + + foreach (DirectoryInfo directory in parent.GetDirectories ()) + size += CalculateSize (directory); + + } catch (Exception) { + return 0; + } + + return size; + } } } diff --git a/SparkleLib/Hg/SparkleFetcherHg.cs b/SparkleLib/Hg/SparkleFetcherHg.cs deleted file mode 100755 index d89a3243..00000000 --- a/SparkleLib/Hg/SparkleFetcherHg.cs +++ /dev/null @@ -1,172 +0,0 @@ -// SparkleShare, a collaboration and sharing tool. 
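The new Size and HistorySize properties (backed by the CalculateSize helper added just above) return raw byte counts, and per the NEWS entry they end up shown in the event log. A small sketch of the kind of formatting a front end would apply before displaying them; the helper below is hypothetical and not part of this diff.

```csharp
using System;

// Hypothetical helper: turn the raw byte counts returned by Size/HistorySize
// into something readable before showing them in the event log.
static class SizeFormat
{
    public static string ToReadableString (double byte_count)
    {
        if (byte_count >= 1073741824)
            return String.Format ("{0:0.#} GB", byte_count / 1073741824);

        if (byte_count >= 1048576)
            return String.Format ("{0:0.#} MB", byte_count / 1048576);

        if (byte_count >= 1024)
            return String.Format ("{0:0.#} KB", byte_count / 1024);

        return byte_count + " bytes";
    }

    static void Main ()
    {
        Console.WriteLine (ToReadableString (52428800)); // prints "50 MB"
    }
}
```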
-// Copyright (C) 2010 Hylke Bons -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - - -using System; -using System.IO; -using System.Diagnostics; -using System.Xml; - -namespace SparkleLib { - - // Sets up a fetcher that can get remote folders - public class SparkleFetcherHg : SparkleFetcherBase { - - public SparkleFetcherHg (string server, string remote_folder, string target_folder) : - base (server, remote_folder, target_folder) { } - - - public override bool Fetch () - { - SparkleHg hg = new SparkleHg (SparklePaths.SparkleTmpPath, - "clone \"" + base.remote_url + "\" " + "\"" + base.target_folder + "\""); - - hg.Start (); - hg.WaitForExit (); - - SparkleHelpers.DebugInfo ("Hg", "Exit code " + hg.ExitCode.ToString ()); - - if (hg.ExitCode != 0) { - return false; - } else { - InstallConfiguration (); - InstallExcludeRules (); - return true; - } - } - - - // Install the user's name and email and some config into - // the newly cloned repository - private void InstallConfiguration () - { - string global_config_file_path = Path.Combine (SparklePaths.SparkleConfigPath, "config.xml"); - - if (!File.Exists (global_config_file_path)) - return; - - string repo_config_file_path = SparkleHelpers.CombineMore (base.target_folder, ".hg", "hgrc"); - string config = String.Join (Environment.NewLine, File.ReadAllLines (repo_config_file_path)); - - // Add user info - string n = Environment.NewLine; - XmlDocument xml = new XmlDocument(); - xml.Load (global_config_file_path); - - XmlNode node_name = xml.SelectSingleNode ("//user/name/text()"); - XmlNode node_email = xml.SelectSingleNode ("//user/email/text()"); - - // TODO this ignore duplicate names (FolderName (2)) - string ignore_file_path = base.target_folder.Replace (SparklePaths.SparkleTmpPath, - SparklePaths.SparklePath); - - ignore_file_path = SparkleHelpers.CombineMore (ignore_file_path, ".hg", "hgignore"); - - config += n + - "[ui]" + n + - "username = " + node_name.Value + " <" + node_email.Value + ">" + n + - "ignore = " + ignore_file_path + n; - - // Write the config to the file - TextWriter writer = new StreamWriter (repo_config_file_path); - writer.WriteLine (config); - writer.Close (); - - string style_file_path = SparkleHelpers.CombineMore (base.target_folder, ".hg", "log.style"); - - string style = "changeset = \"{file_mods}{file_adds}{file_dels}\"" + n + - "file_add = \"A {file_add}\\n\"" + n + - "file_mod = \"M {file_mod}\\n\"" + n + - "file_del = \"D {file_del}\\n\"" + n; - - writer = new StreamWriter (style_file_path); - writer.WriteLine (style); - writer.Close (); - - SparkleHelpers.DebugInfo ("Config", "Added configuration to '" + repo_config_file_path + "'"); - } - - - // Add a .gitignore file to the repo - private void InstallExcludeRules () - { - string exlude_rules_file_path = SparkleHelpers.CombineMore ( - this.target_folder, ".hg", "hgignore"); - - TextWriter writer = new StreamWriter (exlude_rules_file_path); - - writer.WriteLine ("syntax: glob"); - - // 
gedit and emacs - writer.WriteLine ("*~"); - - // vi(m) - writer.WriteLine (".*.sw[a-z]"); - writer.WriteLine ("*.un~"); - writer.WriteLine ("*.swp"); - writer.WriteLine ("*.swo"); - - // KDE - writer.WriteLine (".directory"); - - // Mac OSX - writer.WriteLine (".DS_Store"); - writer.WriteLine ("Icon?"); - writer.WriteLine ("._*"); - writer.WriteLine (".Spotlight-V100"); - writer.WriteLine (".Trashes"); - - // Mac OSX - writer.WriteLine ("*(Autosaved).graffle"); - - // Windows - writer.WriteLine ("Thumbs.db"); - writer.WriteLine ("Desktop.ini"); - - // CVS - writer.WriteLine ("*/CVS/*"); - writer.WriteLine (".cvsignore"); - writer.WriteLine ("*/.cvsignore"); - - // Subversion - writer.WriteLine ("/.svn/*"); - writer.WriteLine ("*/.svn/*"); - - writer.Close (); - } - } - - - public class SparkleHg : Process { - - public SparkleHg (string path, string args) : base () - { - EnableRaisingEvents = true; - StartInfo.FileName = "/opt/local/bin/hg"; - StartInfo.Arguments = args; - StartInfo.RedirectStandardOutput = true; - StartInfo.UseShellExecute = false; - StartInfo.WorkingDirectory = path; - } - - - new public void Start () - { - SparkleHelpers.DebugInfo ("Cmd", StartInfo.FileName + " " + StartInfo.Arguments); - base.Start (); - } - } -} diff --git a/SparkleLib/Hg/SparkleRepoHg.cs b/SparkleLib/Hg/SparkleRepoHg.cs deleted file mode 100755 index f65c3435..00000000 --- a/SparkleLib/Hg/SparkleRepoHg.cs +++ /dev/null @@ -1,487 +0,0 @@ -// SparkleShare, a collaboration and sharing tool. -// Copyright (C) 2010 Hylke Bons -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
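The SparkleHg class deleted above wraps the hg binary the same way the surviving SparkleGit class wraps git: a thin Process subclass that fixes the executable, redirects output and logs the command line before starting. Reduced to its essentials, and with a hypothetical class name, the pattern looks like this:

```csharp
using System;
using System.Diagnostics;

// Minimal sketch of the Process-wrapper pattern used by SparkleGit (and the
// removed SparkleHg). The class name here is hypothetical.
public class VcsCommand : Process
{
    public VcsCommand (string working_dir, string args) : base ()
    {
        EnableRaisingEvents              = true;
        StartInfo.FileName               = "git";
        StartInfo.Arguments              = args;
        StartInfo.RedirectStandardOutput = true;
        StartInfo.UseShellExecute        = false;
        StartInfo.WorkingDirectory       = working_dir;
    }

    // Hide Process.Start () so every invocation gets logged first
    new public void Start ()
    {
        Console.WriteLine ("Cmd | " + StartInfo.FileName + " " + StartInfo.Arguments);
        base.Start ();
    }
}
```

Callers then read StandardOutput or StandardError before calling WaitForExit, which matters for output larger than 4096 bytes, a constraint this diff calls out in its comments.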
- - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Text.RegularExpressions; - -namespace SparkleLib { - - public class SparkleRepoHg : SparkleRepoBase { - - private string exlude_rules_file_path; - private string ExclusionBlock = "#Temporary Exclusions"; - - public SparkleRepoHg (string path, SparkleBackend backend) : - base (path, backend) { - // Set exclude file path - exlude_rules_file_path = SparkleHelpers.CombineMore ( - LocalPath, ".hg", "hgignore"); - } - - - public override string Identifier { - get { - SparkleHg hg = new SparkleHg (LocalPath, "log -r : --limit 1 --template \"{node}\""); - hg.Start (); - hg.WaitForExit (); - - return hg.StandardOutput.ReadToEnd (); - } - } - - - public override string CurrentRevision { - get { - SparkleHg hg = new SparkleHg (LocalPath, "log --limit 1 --template \"{node}\""); - hg.Start (); - hg.WaitForExit (); - - string hash = hg.StandardOutput.ReadToEnd ().Trim (); - if (hash.Length > 0) - return hash; - else - return null; - } - } - - - public override bool CheckForRemoteChanges () - { - return true; // Mercurial doesn't have a way to check for the remote hash - } - - - public override bool SyncUp () - { - Add (); - - string message = FormatCommitMessage (); - Commit (message); - - SparkleHg hg = new SparkleHg (LocalPath, "push"); - - hg.Start (); - hg.WaitForExit (); - - if (hg.ExitCode == 0) { - return true; - } else { - return false; - } - } - - - public override bool SyncDown () - { - SparkleHg hg = new SparkleHg (LocalPath, "pull"); - - hg.Start (); - hg.WaitForExit (); - - if (hg.ExitCode == 0) { - Merge (); - return true; - } else { - return false; - } - } - - - public override bool AnyDifferences { - get { - SparkleHg hg = new SparkleHg (LocalPath, "status"); - hg.Start (); - hg.WaitForExit (); - - string output = hg.StandardOutput.ReadToEnd ().TrimEnd (); - string [] lines = output.Split ("\n".ToCharArray ()); - - foreach (string line in lines) { - if (line.Length > 1 && !line [1].Equals (" ")) - return true; - } - - return false; - } - } - - - public override bool HasUnsyncedChanges { - get { - string unsynced_file_path = SparkleHelpers.CombineMore (LocalPath, - ".hg", "has_unsynced_changes"); - - return File.Exists (unsynced_file_path); - } - - set { - string unsynced_file_path = SparkleHelpers.CombineMore (LocalPath, - ".hg", "has_unsynced_changes"); - - if (value) { - if (!File.Exists (unsynced_file_path)) - File.Create (unsynced_file_path).Close (); - } else { - File.Delete (unsynced_file_path); - } - } - } - - - // Stages the made changes - private void Add () - { - SparkleHg hg = new SparkleHg (LocalPath, "addremove --quiet"); - hg.Start (); - hg.WaitForExit (); - - SparkleHelpers.DebugInfo ("Hg", "[" + Name + "] Changes staged"); - } - - // Add a new file to be ignored - public override bool AddExclusionRule (FileSystemEventArgs args) { - - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - exclusions = ReadExclusionRules(); - } - catch { - return false; - } - - // Look for the local exclusions section - bool added = false; - for(int i = 0; i < exclusions.Count; i++) { - string entry = exclusions[i]; - if(entry.Equals(ExclusionBlock)) { - // add a new exclusion rule containing a file path - exclusions.Insert(i + 1, RelativePath); - added = true; - break; - } - } - - /* - * For compability to existing repos: - * Add a "#Temporary Exclusions"-Block to the - * ignore file in order to recognize this - * exclude rules 
later on - */ - if(!added) { - exclusions.Add(ExclusionBlock); - exclusions.Add(RelativePath); - } - - // Write exceptions list back to file - return WriteExclusionRules(exclusions); - } - - // Check whether a specific rule exists in the exclusion file - public override bool ExclusionRuleExists(FileSystemEventArgs args) { - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - // Read rules from temporary block only - exclusions = ReadExclusionRules(true); - - foreach(string entry in exclusions) { - if(entry.Equals(RelativePath)) { - return true; - } - } - } catch { - SparkleHelpers.DebugInfo("Error", "Cannot determine whether an exclusion rule for " + - args.FullPath + " already exists or not."); - return false; - } - - return false; - } - - // Remove file from exclusion list when they are readable again - public override bool RemoveExclusionRule(FileSystemEventArgs args) { - string RelativePath = SparkleHelpers.DiffPaths(args.FullPath, LocalPath); - - List exclusions; - try { - exclusions = ReadExclusionRules(); - - /* - * Removing a rule should only apply to rules in the "Temporary Exclusion"-block. - * Therefore we first read until reaching the block and then remove the rule. - * - * We cannot use ReadExclusionRules(true) here since we write all lines back - * to the file. This would result in a crippled exclusion file. - */ - bool BlockReached = false; - foreach(string entry in exclusions) { - if(entry.Equals(ExclusionBlock)) { - BlockReached = true; - } - - // Remove this rule - if(BlockReached && entry.Equals(RelativePath)) { - exclusions.Remove(entry); - break; - } - } - - return WriteExclusionRules(exclusions); - } catch { - SparkleHelpers.DebugInfo("Error", "Unable to remove exclusion rule for entry " + RelativePath); - return false; - } - } - - // Reads the exclusion rules file into a string list - private List ReadExclusionRules() { - - List exclusions = new List(); - TextReader reader = new StreamReader (exlude_rules_file_path);; - - try { - while(reader.Peek() > -1) { - exclusions.Add(reader.ReadLine().TrimEnd()); - } - } - catch (IOException e) { - SparkleHelpers.DebugInfo("Error", "Reading from exclusion file failed: " + e.Message); - return new List(); - } - finally { - if(reader != null) { - reader.Close(); - } - } - - return exclusions; - } - - // Reads rules only from temporary exclusion block - private List ReadExclusionRules(bool TempOnly) { - if(TempOnly) { - bool ForceRead = false; - List exclusions = new List(); - foreach(string entry in ReadExclusionRules()) { - if(ForceRead || entry.Equals(ExclusionBlock)) { - exclusions.Add(entry); - ForceRead = true; - } - } - - return exclusions; - } - - return ReadExclusionRules(); - } - - // Writes the exclusion rules file with a given string list - private bool WriteExclusionRules(List lines) { - - TextWriter writer = new StreamWriter (exlude_rules_file_path); - - try { - foreach(string line in lines) { - writer.WriteLine(line.TrimEnd()); - } - } catch(IOException e) { - SparkleHelpers.DebugInfo("Error", "Writing into exclusion file failed: " + e.Message); - return false; - } - finally { - if(writer != null) { - writer.Close(); - } - } - - return true; - } - - // Commits the made changes - private void Commit (string message) - { - if (!AnyDifferences) - return; - - SparkleHg hg = new SparkleHg (LocalPath, "commit -m '" + message + "'"); - hg.Start (); - hg.WaitForExit (); - - SparkleHelpers.DebugInfo ("Commit", "[" + Name + "] " + message); - } - - - // Merges the fetched 
changes - private void Merge () - { - DisableWatching (); - - if (AnyDifferences) { - Add (); - - string commit_message = FormatCommitMessage (); - Commit (commit_message); - } - - SparkleHg hg = new SparkleHg (LocalPath, "update"); - - hg.Start (); - hg.WaitForExit (); - - EnableWatching (); - } - - - // Returns a list of the latest change sets - public override List GetChangeSets (int count) - { - if (count < 1) - count = 30; - - List change_sets = new List (); - - SparkleHg hg_log = new SparkleHg (LocalPath, "log --limit " + count + " --style changelog --verbose --stat"); - Console.OutputEncoding = System.Text.Encoding.Unicode; - hg_log.Start (); - - // Reading the standard output HAS to go before - // WaitForExit, or it will hang forever on output > 4096 bytes - string output = hg_log.StandardOutput.ReadToEnd (); - hg_log.WaitForExit (); - - string [] lines = output.Split ("\n".ToCharArray ()); - List entries = new List (); - - int j = 0; - string entry = "", last_entry = ""; - foreach (string line in lines) { - if (line.StartsWith ("2") && line.EndsWith (")") && j > 0) { - entries.Add (entry); - entry = ""; - } - - entry += line + "\n"; - j++; - - last_entry = entry; - } - - entries.Add (last_entry); - - Regex regex = new Regex (@"([0-9]{4})-([0-9]{2})-([0-9]{2}).*([0-9]{2}):([0-9]{2}).*.([0-9]{4})" + - "(.+)<(.+)>.*.([a-z0-9]{12})", RegexOptions.Compiled); - - foreach (string log_entry in entries) { - - bool is_merge_commit = false; - - Match match = regex.Match (log_entry); - - if (!match.Success) - continue; - - SparkleChangeSet change_set = new SparkleChangeSet () { - Revision = match.Groups [9].Value, - IsMagical = is_merge_commit - }; - - change_set.User.Name = match.Groups [7].Value.Trim (); - change_set.User.Email = match.Groups [8].Value; - - - change_set.Timestamp = new DateTime (int.Parse (match.Groups [1].Value), - int.Parse (match.Groups [2].Value), int.Parse (match.Groups [3].Value), - int.Parse (match.Groups [4].Value), int.Parse (match.Groups [5].Value), 0); - - string [] entry_lines = log_entry.Split ("\n".ToCharArray ()); - - foreach (string entry_line in entry_lines) { - if (!entry_line.StartsWith ("\t* ")) - continue; - - if (entry_line.EndsWith ("new file.")) { - string files = entry_line.Substring (3, entry_line.Length - 13); - string [] added_files = files.Split (",".ToCharArray ()); - - foreach (string added_file in added_files) { - string file = added_file.TrimEnd (": ".ToCharArray ()); - change_set.Added.Add (file); - } - - } else if (entry_line.EndsWith ("deleted file.")) { - string files = entry_line.Substring (3, entry_line.Length - 17); - string [] deleted_files = files.Split (",".ToCharArray ()); - - foreach (string deleted_file in deleted_files) { - string file = deleted_file.TrimEnd (": ".ToCharArray ()); - change_set.Deleted.Add (file); - } - - } else if (!"".Equals (entry_line.Trim ())){ - string files = entry_line.Substring (3); - files = files.TrimEnd (":".ToCharArray()); - string [] edited_files = files.Split (",".ToCharArray ()); - - foreach (string edited_file in edited_files) { - if (!change_set.Added.Contains (edited_file) && - !change_set.Deleted.Contains (edited_file)) { - - change_set.Edited.Add (edited_file); - } - } - } - } - - change_sets.Add (change_set); - } - - return change_sets; - } - - - // Creates a pretty commit message based on what has changed - private string FormatCommitMessage () // TODO - { - return "SparkleShare Hg"; - } - - - public override void CreateInitialChangeSet () - { - base.CreateInitialChangeSet (); - Add (); - 
- string message = FormatCommitMessage (); - Commit (message); - } - - - public override bool UsesNotificationCenter - { - get { - string file_path = SparkleHelpers.CombineMore (LocalPath, ".hg", "disable_notification_center"); - return !File.Exists (file_path); - } - } - } -} diff --git a/SparkleLib/Makefile.am b/SparkleLib/Makefile.am index 6356372a..b696aa1b 100755 --- a/SparkleLib/Makefile.am +++ b/SparkleLib/Makefile.am @@ -1,12 +1,6 @@ ASSEMBLY = SparkleLib TARGET = library -if HAVE_SMARTIRC4NET -LINK = $(REF_SPARKLELIB) $(LINK_SMARTIRC4NET_SYSTEM) -else -LINK = $(REF_SPARKLELIB) $(LINK_SMARTIRC4NET_LOCAL) -endif - SOURCES = \ Defines.cs \ Git/SparkleFetcherGit.cs \ @@ -17,17 +11,11 @@ SOURCES = \ SparkleFetcherBase.cs \ SparkleHelpers.cs \ SparkleListenerBase.cs \ - SparkleListenerIrc.cs \ SparkleListenerTcp.cs \ - SparkleOptions.cs \ SparkleRepoBase.cs \ SparkleWatcher.cs -SMARTIRC4NET_FILES_EXPANDED = $(foreach file, $(SMARTIRC4NET_FILES), $(top_builddir)/$(file)) - -EXTRA_BUNDLE = $(SMARTIRC4NET_FILES_EXPANDED) - install-data-hook: for ASM in $(EXTRA_BUNDLE); do \ $(INSTALL) -m 0755 $$ASM $(DESTDIR)$(moduledir); \ diff --git a/SparkleLib/SparkleChangeSet.cs b/SparkleLib/SparkleChangeSet.cs index 5496e44e..a24faaaa 100755 --- a/SparkleLib/SparkleChangeSet.cs +++ b/SparkleLib/SparkleChangeSet.cs @@ -102,7 +102,7 @@ namespace SparkleLib { public class SparkleFolder { public string Name; - // TODO: Uri + public Uri RemoteAddress; public string FullPath { get { diff --git a/SparkleLib/SparkleConfig.cs b/SparkleLib/SparkleConfig.cs index 1fd3fece..f9f57548 100755 --- a/SparkleLib/SparkleConfig.cs +++ b/SparkleLib/SparkleConfig.cs @@ -19,10 +19,7 @@ using System; using System.IO; using System.Collections.Generic; using System.Xml; - -#if __MonoCS__ -using Mono.Unix; -#endif +using System.Security.Principal; namespace SparkleLib { @@ -33,12 +30,14 @@ namespace SparkleLib { "sparkleshare"); public static SparkleConfig DefaultConfig = new SparkleConfig (ConfigPath, "config.xml"); - - public string FullPath; - public string HomePath = Environment.GetFolderPath (Environment.SpecialFolder.Personal); - public string TmpPath; + public string HomePath { + get { + return Environment.GetFolderPath (Environment.SpecialFolder.Personal); + } + } + public string FoldersPath { get { @@ -49,11 +48,16 @@ namespace SparkleLib { } } + public string TmpPath { + get { + return Path.Combine (FoldersPath, ".tmp"); + } + } + public SparkleConfig (string config_path, string config_file_name) { FullPath = System.IO.Path.Combine (config_path, config_file_name); - TmpPath = Path.Combine (FoldersPath, ".tmp"); if (!Directory.Exists (config_path)) { Directory.CreateDirectory (config_path); @@ -99,13 +103,13 @@ namespace SparkleLib { if (SparkleBackend.Platform == PlatformID.Unix || SparkleBackend.Platform == PlatformID.MacOSX) { -#if __MonoCS__ - user_name = new UnixUserInfo (UnixEnvironment.UserName).RealName; + + user_name = Environment.UserName; if (string.IsNullOrEmpty (user_name)) - user_name = UnixEnvironment.UserName; + user_name = ""; else - user_name = user_name.TrimEnd (",".ToCharArray()); -#endif + user_name = user_name.TrimEnd (",".ToCharArray ()); + } else { user_name = Environment.UserName; } @@ -148,10 +152,59 @@ namespace SparkleLib { email_node.InnerText = user.Email; this.Save (); + + ConfigureSSH (); } } + private void ConfigureSSH () + { + if (User.Email.Equals ("Unknown")) + return; + + string path = Environment.GetFolderPath (Environment.SpecialFolder.Personal); + + if (!(SparkleBackend.Platform 
== PlatformID.Unix || + SparkleBackend.Platform == PlatformID.MacOSX)) { + + path = Environment.ExpandEnvironmentVariables ("%HOMEDRIVE%%HOMEPATH%"); + } + + string ssh_config_path = Path.Combine (path, ".ssh"); + string ssh_config_file_path = SparkleHelpers.CombineMore (path, ".ssh", "config"); + string ssh_config = "IdentityFile " + + Path.Combine (SparkleConfig.ConfigPath, "sparkleshare." + User.Email + ".key"); + + if (!Directory.Exists (ssh_config_path)) + Directory.CreateDirectory (ssh_config_path); + + if (File.Exists (ssh_config_file_path)) { + string current_config = File.ReadAllText (ssh_config_file_path); + if (current_config.Contains (ssh_config)) + return; + + if (current_config.EndsWith ("\n\n")) + ssh_config = "# SparkleShare's key\n" + ssh_config; + else if (current_config.EndsWith ("\n")) + ssh_config = "\n# SparkleShare's key\n" + ssh_config; + else + ssh_config = "\n\n# SparkleShare's key\n" + ssh_config; + + TextWriter writer = File.AppendText (ssh_config_file_path); + writer.Write (ssh_config + "\n"); + writer.Close (); + + } else { + File.WriteAllText (ssh_config_file_path, ssh_config); + } + + Chmod644 (ssh_config_file_path); + + SparkleHelpers.DebugInfo ("Config", "Added key to " + ssh_config_file_path); + } + + public List Folders { get { List folders = new List (); @@ -348,6 +401,16 @@ namespace SparkleLib { this.Save (FullPath); SparkleHelpers.DebugInfo ("Config", "Updated \"" + FullPath + "\""); } + + + private void Chmod644 (string file_path) + { + // Hack to be able to set the permissions on a file + // that OpenSSH still likes without resorting to Mono.Unix + FileInfo file_info = new FileInfo (file_path); + file_info.Attributes = FileAttributes.ReadOnly; + file_info.Attributes = FileAttributes.Normal; + } } diff --git a/SparkleLib/SparkleFetcherBase.cs b/SparkleLib/SparkleFetcherBase.cs index 0ab93307..da69976d 100755 --- a/SparkleLib/SparkleFetcherBase.cs +++ b/SparkleLib/SparkleFetcherBase.cs @@ -18,20 +18,17 @@ using System; using System.IO; using System.Diagnostics; +using System.Security.AccessControl; using System.Text.RegularExpressions; using System.Threading; -#if __MonoCS__ -using Mono.Unix; -#endif - namespace SparkleLib { // Sets up a fetcher that can get remote folders public abstract class SparkleFetcherBase { public delegate void StartedEventHandler (); - public delegate void FinishedEventHandler (); + public delegate void FinishedEventHandler (string [] warnings); public delegate void FailedEventHandler (); public delegate void ProgressChangedEventHandler (double percentage); @@ -54,7 +51,7 @@ namespace SparkleLib { public abstract bool Fetch (); - + public abstract string [] Warnings { get; } // Clones the remote repository public void Start () @@ -85,7 +82,7 @@ namespace SparkleLib { EnableHostKeyCheckingForHost (host); if (Finished != null) - Finished (); + Finished (Warnings); } else { SparkleHelpers.DebugInfo ("Fetcher", "Failed"); @@ -159,13 +156,8 @@ namespace SparkleLib { File.WriteAllText (ssh_config_file_path, ssh_config); } -#if __MonoCS__ - UnixFileSystemInfo file_info = new UnixFileInfo (ssh_config_file_path); - file_info.FileAccessPermissions = (FileAccessPermissions.UserRead | - FileAccessPermissions.UserWrite); -#endif - - SparkleHelpers.DebugInfo ("Fetcher", "Disabled host key checking " + host); + Chmod644 (ssh_config_file_path); + SparkleHelpers.DebugInfo ("Fetcher", "Disabled host key checking for " + host); } @@ -173,8 +165,8 @@ namespace SparkleLib { { string path = SparkleConfig.DefaultConfig.HomePath; - if 
(!(SparkleBackend.Platform == PlatformID.Unix || - SparkleBackend.Platform == PlatformID.MacOSX)) { + if (SparkleBackend.Platform != PlatformID.Unix && + SparkleBackend.Platform != PlatformID.MacOSX) { path = Environment.ExpandEnvironmentVariables ("%HOMEDRIVE%%HOMEPATH%"); } @@ -211,12 +203,7 @@ namespace SparkleLib { } else { File.WriteAllText (ssh_config_file_path, new_ssh_config.Trim ()); - -#if __MonoCS__ - UnixFileSystemInfo file_info = new UnixFileInfo (ssh_config_file_path); - file_info.FileAccessPermissions = (FileAccessPermissions.UserRead | - FileAccessPermissions.UserWrite); -#endif + Chmod644 (ssh_config_file_path); } } @@ -234,5 +221,15 @@ namespace SparkleLib { else return null; } + + + private void Chmod644 (string file_path) + { + // Hack to be able to set the permissions on a file + // that OpenSSH still likes without resorting to Mono.Unix + FileInfo file_info = new FileInfo (file_path); + file_info.Attributes = FileAttributes.ReadOnly; + file_info.Attributes = FileAttributes.Normal; + } } } diff --git a/SparkleLib/SparkleLib.csproj b/SparkleLib/SparkleLib.csproj index 8e18cd24..a0ac9494 100755 --- a/SparkleLib/SparkleLib.csproj +++ b/SparkleLib/SparkleLib.csproj @@ -31,10 +31,6 @@ - - False - ..\bin\Meebey.SmartIrc4net.dll - diff --git a/SparkleLib/SparkleListenerBase.cs b/SparkleLib/SparkleListenerBase.cs index bba5b171..1770c04a 100755 --- a/SparkleLib/SparkleListenerBase.cs +++ b/SparkleLib/SparkleListenerBase.cs @@ -75,9 +75,6 @@ namespace SparkleLib { case "tcp": listeners.Add (new SparkleListenerTcp (announce_uri, folder_identifier)); break; - case "irc": - listeners.Add (new SparkleListenerIrc (announce_uri, folder_identifier)); - break; default: listeners.Add (new SparkleListenerTcp (announce_uri, folder_identifier)); break; diff --git a/SparkleLib/SparkleListenerIrc.cs b/SparkleLib/SparkleListenerIrc.cs deleted file mode 100644 index ff8821fe..00000000 --- a/SparkleLib/SparkleListenerIrc.cs +++ /dev/null @@ -1,221 +0,0 @@ -// SparkleShare, a collaboration and sharing tool. -// Copyright (C) 2010 Hylke Bons -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
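SparkleFetcherBase now declares an abstract `Warnings` property and passes it to the `Finished` event, and the git implementation earlier in this diff fills it by asking git whether a global `core.excludesfile` is configured, since such a file can interfere with SparkleShare's own exclude rules. A stand-alone sketch of that check; this is a hypothetical program, not the fetcher code itself.

```csharp
using System;
using System.Diagnostics;

// Stand-alone sketch of the check behind the new Warnings property:
// ask git for a global excludes file and warn if one is configured.
class GlobalExcludesCheck
{
    static void Main ()
    {
        Process git = new Process ();
        git.StartInfo.FileName               = "git";
        git.StartInfo.Arguments              = "config --global core.excludesfile";
        git.StartInfo.UseShellExecute        = false;
        git.StartInfo.RedirectStandardOutput = true;
        git.Start ();

        // Read stdout before WaitForExit, or output larger than 4096 bytes
        // can deadlock (the same ordering the diff insists on).
        string output = git.StandardOutput.ReadToEnd ().Trim ();
        git.WaitForExit ();

        if (!string.IsNullOrEmpty (output))
            Console.WriteLine ("A global gitignore is configured: " + output +
                " -- this may interfere with per-project exclude rules.");
    }
}
```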
- - -using System; -using System.Text; -using System.Threading; -using System.Security.Cryptography; - -using Meebey.SmartIrc4net; - -namespace SparkleLib { - - public class SparkleListenerIrc : SparkleListenerBase { - - private Thread thread; - private IrcClient client; - private string nick; - private string announcements_password; - private bool allow_passwordless_join; - - - public SparkleListenerIrc (Uri server, string folder_identifier) : - base (server, folder_identifier) - { - // Try to get a uniqueish nickname - this.nick = SHA1 (DateTime.Now.ToString ("ffffff") + "sparkles"); - - // Most irc servers don't allow nicknames starting - // with a number, so prefix an alphabetic character - this.nick = "s" + this.nick.Substring (0, 7); - - // Optional password to make the channel access more safe - this.announcements_password = - SparkleConfig.DefaultConfig.GetConfigOption ("announcements_password"); - - // Option to allow access to channel when no password is defined - try { - string option = SparkleConfig.DefaultConfig.GetConfigOption ("allow_passwordless_join"); - this.allow_passwordless_join = (option == null || Convert.ToBoolean (option)); - } catch (Exception) { - this.allow_passwordless_join = true; - } - - base.channels.Add ("#" + folder_identifier); - - this.client = new IrcClient () { - PingTimeout = 180, - PingInterval = 60 - }; - - string proxy = Environment.GetEnvironmentVariable ("http_proxy"); - Uri proxy_uri = null; - if (!String.IsNullOrEmpty (proxy) && - Uri.TryCreate (proxy, UriKind.Absolute, out proxy_uri)) { - -#if __MonoCS__ - if (proxy_uri.Scheme == "http") { - this.client.ProxyType = ProxyType.Http; - this.client.ProxyHost = proxy_uri.Host; - this.client.ProxyPort = proxy_uri.Port; - } -#endif - } - - this.client.OnConnected += delegate { - base.is_connecting = false; - OnConnected (); - }; - - this.client.OnDisconnected += delegate { - base.is_connecting = false; - OnDisconnected (); - }; - - this.client.OnError += delegate { - base.is_connecting = false; - OnDisconnected (); - }; - - this.client.OnChannelMessage += delegate (object o, IrcEventArgs args) { - string message = args.Data.Message.Trim (); - string folder_id = args.Data.Channel.Substring (1); // remove the starting hash - OnAnnouncement (new SparkleAnnouncement (folder_id, message)); - }; - } - - - public override bool IsConnected { - get { - return this.client.IsConnected; - } - } - - - // Starts a new thread and listens to the channel - public override void Connect () - { - SparkleHelpers.DebugInfo ("ListenerIrc", "Connecting to " + Server); - - base.is_connecting = true; - - this.thread = new Thread ( - new ThreadStart (delegate { - try { - // Connect, login, and join the channel - int port = base.server.Port; - - if (port < 0) - port = 6667; - - this.client.Connect (base.server.Host, port); - this.client.Login (this.nick, this.nick, 8, this.nick); - - foreach (string channel in base.channels) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Joining channel " + channel); - - if (!string.IsNullOrEmpty (this.announcements_password)) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Password set to access the channel"); - this.client.RfcJoin (channel, this.announcements_password); - this.client.RfcMode (channel, "+k " + this.announcements_password); - - } else { - if (this.allow_passwordless_join) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Accessing unprotected channel, change the setting to not access"); - this.client.RfcJoin (channel); - - } else { - SparkleHelpers.DebugInfo ("ListenerIrc", 
"Unprotected channel, change the setting to access"); - base.is_connecting = false; - OnDisconnected (); - throw new ConnectionException ("Unprotected channel, change the setting to access"); - } - } - - this.client.RfcMode (channel, "+s"); - } - - // List to the channel, this blocks the thread - this.client.Listen (); - - // Disconnect when we time out - this.client.Disconnect (); - - } catch (ConnectionException e) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Could not connect to " + Server + ": " + e.Message); - } - }) - ); - - this.thread.Start (); - } - - - public override void AlsoListenTo (string folder_identifier) - { - string channel = "#" + folder_identifier; - if (!base.channels.Contains (channel)) { - base.channels.Add (channel); - - if (IsConnected) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Joining channel " + channel); - if (this.announcements_password != null) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Password set to access the channel"); - this.client.RfcJoin (channel, this.announcements_password); - this.client.RfcMode (channel, "+k " + this.announcements_password); - } else { - if (allow_passwordless_join) { - SparkleHelpers.DebugInfo ("ListenerIrc", "Accessing a dangerous channel change the setting to not access"); - this.client.RfcJoin (channel); - } else { - SparkleHelpers.DebugInfo ("ListenerIrc", "Dangerous channel, change the setting to access"); - } - } - this.client.RfcMode (channel, "+s"); - } - } - } - - - public override void Announce (SparkleAnnouncement announcement) - { - string channel = "#" + announcement.FolderIdentifier; - this.client.SendMessage (SendType.Message, channel, announcement.Message); - - // Also announce to ourselves for debugging purposes - // base.OnAnnouncement (announcement); - } - - - public override void Dispose () - { - this.thread.Abort (); - this.thread.Join (); - base.Dispose (); - } - - - // Creates a SHA-1 hash of input - private string SHA1 (string s) - { - SHA1 sha1 = new SHA1CryptoServiceProvider (); - Byte[] bytes = ASCIIEncoding.Default.GetBytes (s); - Byte[] encoded_bytes = sha1.ComputeHash (bytes); - return BitConverter.ToString (encoded_bytes).ToLower ().Replace ("-", ""); - } - } -} diff --git a/SparkleLib/SparklePath.cs b/SparkleLib/SparklePath.cs deleted file mode 100644 index 6529008a..00000000 --- a/SparkleLib/SparklePath.cs +++ /dev/null @@ -1,37 +0,0 @@ -// SparkleShare, a collaboration and sharing tool. -// Copyright (C) 2010 Hylke Bons -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- - -using System; -using System.IO; - -namespace SparkleLib { - - public static class SparklePaths { - - public static string HomePath = Environment.GetFolderPath (Environment.SpecialFolder.Personal); - public static string SparklePath = Path.Combine (HomePath ,"SparkleShare"); - public static string SparkleTmpPath = Path.Combine (SparklePath, ".tmp"); - public static string SparkleConfigPath = Path.Combine (Environment.GetFolderPath ( - Environment.SpecialFolder.ApplicationData), "sparkleshare"); - public static string SparkleLocalIconPath = Path.Combine (SparkleConfigPath, "icons"); - - public static string SparkleInstallPath = Path.Combine (Defines.PREFIX, "sparkleshare"); - public static string SparkleIconPath = SparkleHelpers.CombineMore (Defines.DATAROOTDIR, "sparkleshare", "icons"); - - } -} - diff --git a/SparkleLib/SparkleRepoBase.cs b/SparkleLib/SparkleRepoBase.cs index 86aa8779..05fad586 100755 --- a/SparkleLib/SparkleRepoBase.cs +++ b/SparkleLib/SparkleRepoBase.cs @@ -44,11 +44,13 @@ namespace SparkleLib { private TimeSpan poll_interval; private System.Timers.Timer local_timer = new System.Timers.Timer () { Interval = 0.25 * 1000 }; private System.Timers.Timer remote_timer = new System.Timers.Timer () { Interval = 10 * 1000 }; - private DateTime last_poll = DateTime.Now; - private List sizebuffer = new List (); - private bool has_changed = false; - private Object change_lock = new Object (); - private Object watch_lock = new Object (); + private DateTime last_poll = DateTime.Now; + private List sizebuffer = new List (); + private bool has_changed = false; + private Object change_lock = new Object (); + private Object watch_lock = new Object (); + private double progress_percentage = 0.0; + private string progress_speed = ""; protected SparkleListenerBase listener; protected SyncStatus status; @@ -64,19 +66,23 @@ namespace SparkleLib { public abstract string CurrentRevision { get; } public abstract bool SyncUp (); public abstract bool SyncDown (); + public abstract double CalculateSize (DirectoryInfo parent); public abstract bool HasUnsyncedChanges { get; set; } + public abstract List ExcludePaths { get; } + + public abstract double Size { get; } + public abstract double HistorySize { get; } - public abstract bool AddExclusionRule (FileSystemEventArgs args); - public abstract bool RemoveExclusionRule (FileSystemEventArgs args); - public abstract bool ExclusionRuleExists (FileSystemEventArgs args); - public delegate void SyncStatusChangedEventHandler (SyncStatus new_status); public event SyncStatusChangedEventHandler SyncStatusChanged; + public delegate void SyncProgressChangedEventHandler (double percentage, string speed); + public event SyncProgressChangedEventHandler SyncProgressChanged; + public delegate void NewChangeSetEventHandler (SparkleChangeSet change_set); public event NewChangeSetEventHandler NewChangeSet; - public delegate void NewNoteEventHandler (string user_name, string user_email); + public delegate void NewNoteEventHandler (SparkleUser user); public event NewNoteEventHandler NewNote; public delegate void ConflictResolvedEventHandler (); @@ -108,8 +114,6 @@ namespace SparkleLib { }; this.remote_timer.Elapsed += delegate { - string identifier = Identifier; - bool time_to_poll = (DateTime.Compare (this.last_poll, DateTime.Now.Subtract (this.poll_interval)) < 0); @@ -122,7 +126,7 @@ namespace SparkleLib { // In the unlikely case that we haven't synced up our // changes or the server was down, sync up again - if (HasUnsyncedChanges) + if (HasUnsyncedChanges && 
!IsSyncing && this.server_online) SyncUpBase (); }; @@ -157,6 +161,20 @@ namespace SparkleLib { } + public double ProgressPercentage { + get { + return this.progress_percentage; + } + } + + + public string ProgressSpeed { + get { + return this.progress_speed; + } + } + + public virtual string [] UnsyncedFilePaths { get { return new string [0]; @@ -186,7 +204,7 @@ namespace SparkleLib { } - public virtual bool CheckForRemoteChanges () // HasRemoteChanges { get; } ? + public virtual bool CheckForRemoteChanges () // TODO: HasRemoteChanges { get; } { return true; } @@ -223,11 +241,8 @@ namespace SparkleLib { public void Dispose () { this.remote_timer.Dispose (); - this.remote_timer = null; this.local_timer.Dispose (); - this.local_timer = null; this.listener.Dispose (); - this.listener = null; } @@ -301,7 +316,7 @@ namespace SparkleLib { } - private bool IsSyncing { + public bool IsSyncing { get { return (Status == SyncStatus.SyncUp || Status == SyncStatus.SyncDown || @@ -318,7 +333,7 @@ namespace SparkleLib { this.sizebuffer.RemoveAt (0); DirectoryInfo dir_info = new DirectoryInfo (LocalPath); - this.sizebuffer.Add (CalculateFolderSize (dir_info)); + this.sizebuffer.Add (CalculateSize (dir_info)); if (this.sizebuffer.Count >= 4 && this.sizebuffer [0].Equals (this.sizebuffer [1]) && @@ -347,42 +362,13 @@ namespace SparkleLib { if (!this.watcher.EnableRaisingEvents) return; - if (args.FullPath.Contains (Path.DirectorySeparatorChar + ".") && - !args.FullPath.Contains (Path.DirectorySeparatorChar + ".notes")) - return; + string relative_path = args.FullPath.Replace (LocalPath, ""); - /* - * Check whether the file which triggered the action - * is readable so that git can actually commit it - */ - try { - if(File.Exists(args.FullPath)) { - FileStream file = File.OpenRead(args.FullPath); - file.Close(); - } - } catch { - if(!ExclusionRuleExists(args)) { - SparkleHelpers.DebugInfo("Warning", "File " + args.FullPath + " is not readable. Adding to ignore list."); - AddExclusionRule(args); - } - - return; - } - - /* - * Remove rule if file is readable but there is still - * an exclusion rule set - */ - if(ExclusionRuleExists(args)) { - SparkleHelpers.DebugInfo("Info", "Removing exclusion rule for " + args.Name); - RemoveExclusionRule(args); - - // If a file was former unreadable but has now been (re)moved, skip the process. 
- if(!File.Exists(args.FullPath)) { + foreach (string exclude_path in ExcludePaths) { + if (relative_path.Contains (exclude_path)) return; - } } - + WatcherChangeTypes wct = args.ChangeType; if (AnyDifferences) { @@ -402,8 +388,7 @@ namespace SparkleLib { SparkleHelpers.DebugInfo ("Event", "[" + Name + "] " + wct.ToString () + " '" + args.Name + "'"); SparkleHelpers.DebugInfo ("Event", "[" + Name + "] Changes found, checking if settled."); - if (this.remote_timer != null) - this.remote_timer.Stop (); + this.remote_timer.Stop (); lock (this.change_lock) { this.has_changed = true; @@ -452,8 +437,7 @@ namespace SparkleLib { { try { DisableWatching (); - if (this.remote_timer != null) - this.remote_timer.Stop (); + this.remote_timer.Stop (); SparkleHelpers.DebugInfo ("SyncUp", "[" + Name + "] Initiated"); @@ -468,34 +452,37 @@ namespace SparkleLib { if (SyncStatusChanged != null) SyncStatusChanged (SyncStatus.Idle); - if (this.listener != null) - this.listener.AnnounceBase (new SparkleAnnouncement(Identifier, CurrentRevision)); + this.listener.AnnounceBase (new SparkleAnnouncement (Identifier, CurrentRevision)); } else { SparkleHelpers.DebugInfo ("SyncUp", "[" + Name + "] Error"); HasUnsyncedChanges = true; SyncDownBase (); + DisableWatching (); - if (SyncUp ()) { + if (this.server_online && SyncUp ()) { HasUnsyncedChanges = false; if (SyncStatusChanged != null) SyncStatusChanged (SyncStatus.Idle); - if (this.listener != null) - this.listener.AnnounceBase (new SparkleAnnouncement (Identifier, CurrentRevision)); + this.listener.AnnounceBase (new SparkleAnnouncement (Identifier, CurrentRevision)); } else { + this.server_online = false; + if (SyncStatusChanged != null) SyncStatusChanged (SyncStatus.Error); } } } finally { - if (this.remote_timer != null) - this.remote_timer.Start (); + this.remote_timer.Start (); EnableWatching (); + + this.progress_percentage = 0.0; + this.progress_speed = ""; } } @@ -503,8 +490,7 @@ namespace SparkleLib { private void SyncDownBase () { SparkleHelpers.DebugInfo ("SyncDown", "[" + Name + "] Initiated"); - if (this.remote_timer != null) - this.remote_timer.Stop (); + this.remote_timer.Stop (); DisableWatching (); if (SyncStatusChanged != null) @@ -529,7 +515,7 @@ namespace SparkleLib { foreach (string added in change_set.Added) { if (added.Contains (".notes")) { if (NewNote != null) - NewNote (change_set.User.Name, change_set.User.Email); + NewNote (change_set.User); note_added = true; break; @@ -560,28 +546,28 @@ namespace SparkleLib { if (SyncStatusChanged != null) SyncStatusChanged (SyncStatus.Idle); - if (this.remote_timer != null) - this.remote_timer.Start (); + this.remote_timer.Start (); EnableWatching (); + + this.progress_percentage = 0.0; + this.progress_speed = ""; } public void DisableWatching () { - lock (watch_lock) { + lock (this.watch_lock) { this.watcher.EnableRaisingEvents = false; - if (this.local_timer != null) - this.local_timer.Stop (); + this.local_timer.Stop (); } } public void EnableWatching () { - lock (watch_lock) { + lock (this.watch_lock) { this.watcher.EnableRaisingEvents = true; - if (this.local_timer != null) - this.local_timer.Start (); + this.local_timer.Start (); } } @@ -635,30 +621,15 @@ namespace SparkleLib { } - // Recursively gets a folder's size in bytes - private double CalculateFolderSize (DirectoryInfo parent) + protected void OnSyncProgressChanged (double progress_percentage, string progress_speed) { - if (!System.IO.Directory.Exists (parent.ToString ())) - return 0; + // Console.WriteLine ("OnProgressChanged: " + 
progress_percentage + " " + progress_speed); - double size = 0; + this.progress_percentage = progress_percentage; + this.progress_speed = progress_speed; - // Ignore the temporary 'rebase-apply' directory. This prevents potential - // crashes when files are being queried whilst the files have already been deleted. - if (parent.Name.Equals ("rebase-apply")) - return 0; - - foreach (FileInfo file in parent.GetFiles ()) { - if (!file.Exists) - return 0; - - size += file.Length; - } - - foreach (DirectoryInfo directory in parent.GetDirectories()) - size += CalculateFolderSize (directory); - - return size; + if (SyncProgressChanged != null) + SyncProgressChanged (progress_percentage, progress_speed); } diff --git a/SparkleLib/SparkleWatcher.cs b/SparkleLib/SparkleWatcher.cs index ae0ec573..f1c477fd 100755 --- a/SparkleLib/SparkleWatcher.cs +++ b/SparkleLib/SparkleWatcher.cs @@ -51,7 +51,6 @@ namespace SparkleLib { if (ChangeEvent != null) ChangeEvent (args); }; - } } } diff --git a/SparkleShare/Mac/Info.plist b/SparkleShare/Mac/Info.plist index 97d1ab96..4867455e 100755 --- a/SparkleShare/Mac/Info.plist +++ b/SparkleShare/Mac/Info.plist @@ -8,9 +8,11 @@ org.sparkleshare.sparkleshare CFBundleName SparkleShare + CFBundleVersion + 1 LSMinimumSystemVersion 10.6 - CFBundleVersion + LSUIElement 1 NSMainNibFile MainMenu diff --git a/SparkleShare/Mac/Makefile.am b/SparkleShare/Mac/Makefile.am index c7df923a..6beab296 100755 --- a/SparkleShare/Mac/Makefile.am +++ b/SparkleShare/Mac/Makefile.am @@ -6,7 +6,6 @@ EXTRA_DIST = \ MainMenu.xib \ MainMenu.xib.designer.cs \ SparkleAbout.cs \ - SparkleAlert.cs \ SparkleBadger.cs \ SparkleBubbles.cs \ SparkleController.cs \ diff --git a/SparkleShare/Mac/SparkleAbout.cs b/SparkleShare/Mac/SparkleAbout.cs index 64a3fc93..27d5715b 100755 --- a/SparkleShare/Mac/SparkleAbout.cs +++ b/SparkleShare/Mac/SparkleAbout.cs @@ -57,6 +57,8 @@ namespace SparkleShare { OrderFrontRegardless (); MakeKeyAndOrderFront (this); + Program.UI.UpdateDockIconVisibility (); + Controller.NewVersionEvent += delegate (string new_version) { InvokeOnMainThread (delegate { UpdatesTextField.StringValue = "A newer version (" + new_version + ") is available!"; @@ -161,6 +163,8 @@ namespace SparkleShare { public override bool WindowShouldClose (NSObject sender) { (sender as SparkleAbout).OrderOut (this); + Program.UI.UpdateDockIconVisibility (); + return false; } } diff --git a/SparkleShare/Mac/SparkleAlert.cs b/SparkleShare/Mac/SparkleAlert.cs deleted file mode 100755 index c74d3600..00000000 --- a/SparkleShare/Mac/SparkleAlert.cs +++ /dev/null @@ -1,53 +0,0 @@ -// SparkleShare, a collaboration and sharing tool. -// Copyright (C) 2010 Hylke Bons -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- - -using System; -using System.Collections.Generic; -using System.Drawing; -using System.IO; - -using MonoMac.Foundation; -using MonoMac.AppKit; -using MonoMac.ObjCRuntime; -using MonoMac.WebKit; - -namespace SparkleShare { - - public class SparkleAlert : NSAlert { - - public SparkleAlert () : base () - { - MessageText = "SparkleShare couldn't find Git on your system. Do you want to download it?"; - InformativeText = "Git is required to run SparkleShare."; - - Icon = NSImage.ImageNamed ("sparkleshare.icns"); - - AddButton ("Download"); - AddButton ("Cancel"); - - Buttons [0].Activated += delegate { - NSUrl url = new NSUrl ("http://code.google.com/p/git-osx-installer/downloads/list"); - NSWorkspace.SharedWorkspace.OpenUrl (url); - Environment.Exit (0); - }; - - Buttons [1].Activated += delegate { - Environment.Exit (-1); - }; - } - } -} diff --git a/SparkleShare/Mac/SparkleController.cs b/SparkleShare/Mac/SparkleController.cs index f8a503f8..3b4f0e4b 100755 --- a/SparkleShare/Mac/SparkleController.cs +++ b/SparkleShare/Mac/SparkleController.cs @@ -41,6 +41,21 @@ namespace SparkleShare { public SparkleController () : base () { + string content_path = + Directory.GetParent (System.AppDomain.CurrentDomain.BaseDirectory) + .ToString (); + + string app_path = Directory.GetParent (content_path).ToString (); + string growl_path = Path.Combine (app_path, "Frameworks", "Growl.framework", "Growl"); + + // Needed for Growl + Dlfcn.dlopen (growl_path, 0); + NSApplication.Init (); + + // Let's use the bundled git first + SparkleBackend.DefaultBackend.Path = + Path.Combine (NSBundle.MainBundle.ResourcePath, + "git", "bin", "git"); } @@ -189,11 +204,10 @@ namespace SparkleShare { } - new public void Quit () + public override void OpenFile (string url) { - this.watcher.Dispose (); - NSApplication.SharedApplication.Terminate (new NSObject ()); - base.Quit (); + url = url.Replace ("%20", " "); + NSWorkspace.SharedWorkspace.OpenFile (url); } } } diff --git a/SparkleShare/Mac/SparkleEventLog.cs b/SparkleShare/Mac/SparkleEventLog.cs index 27674b43..38af8234 100755 --- a/SparkleShare/Mac/SparkleEventLog.cs +++ b/SparkleShare/Mac/SparkleEventLog.cs @@ -18,7 +18,6 @@ using System; using System.Drawing; using System.IO; -using System.Text.RegularExpressions; using System.Threading; using MonoMac.Foundation; @@ -36,14 +35,17 @@ namespace SparkleShare { PolicyDelegate = new SparkleWebPolicyDelegate () }; - private NSBox Separator = new NSBox (new RectangleF (0, 579, 480, 1)) { + private NSBox separator = new NSBox (new RectangleF (0, 579, 480, 1)) { BorderColor = NSColor.LightGray, BoxType = NSBoxType.NSBoxCustom }; private NSPopUpButton popup_button; private NSProgressIndicator progress_indicator; - + private NSTextField size_label; + private NSTextField size_label_value; + private NSTextField history_label; + private NSTextField history_label_value; public SparkleEventLog (IntPtr handle) : base (handle) { } @@ -65,7 +67,54 @@ namespace SparkleShare { HasShadow = true; BackingType = NSBackingStore.Buffered; - ContentView.AddSubview (Separator); + + this.size_label = new NSTextField () { + Alignment = NSTextAlignment.Right, + BackgroundColor = NSColor.WindowBackground, + Bordered = false, + Editable = false, + Frame = new RectangleF (0, 588, 60, 20), + StringValue = "Size:", + Font = SparkleUI.BoldFont + }; + + this.size_label_value = new NSTextField () { + Alignment = NSTextAlignment.Left, + BackgroundColor = NSColor.WindowBackground, + Bordered = false, + Editable = false, + Frame = new RectangleF (60, 588, 75, 
20), + StringValue = Controller.Size, + Font = SparkleUI.Font + }; + + + this.history_label = new NSTextField () { + Alignment = NSTextAlignment.Right, + BackgroundColor = NSColor.WindowBackground, + Bordered = false, + Editable = false, + Frame = new RectangleF (130, 588, 60, 20), + StringValue = "History:", + Font = SparkleUI.BoldFont + }; + + this.history_label_value = new NSTextField () { + Alignment = NSTextAlignment.Left, + BackgroundColor = NSColor.WindowBackground, + Bordered = false, + Editable = false, + Frame = new RectangleF (190, 588, 75, 20), + StringValue = Controller.HistorySize, + Font = SparkleUI.Font + }; + + + ContentView.AddSubview (this.size_label); + ContentView.AddSubview (this.size_label_value); + ContentView.AddSubview (this.history_label); + ContentView.AddSubview (this.history_label_value); + ContentView.AddSubview (this.separator); this.progress_indicator = new NSProgressIndicator () { @@ -81,6 +130,7 @@ namespace SparkleShare { UpdateChooser (null); OrderFrontRegardless (); + Program.UI.UpdateDockIconVisibility (); // Hook up the controller events Controller.UpdateChooserEvent += delegate (string [] folders) { @@ -103,6 +153,13 @@ namespace SparkleShare { ContentView.AddSubview (this.progress_indicator); }); }; + + Controller.UpdateSizeInfoEvent += delegate (string size, string history_size) { + InvokeOnMainThread (delegate { + this.size_label_value.StringValue = size; + this.history_label_value.StringValue = history_size; + }); + }; } @@ -123,7 +180,7 @@ namespace SparkleShare { this.popup_button.Font = NSFontManager.SharedFontManager.FontWithFamily ("Lucida Grande", NSFontTraitMask.Condensed, 0, NSFont.SmallSystemFontSize); - this.popup_button.AddItem ("All Folders"); + this.popup_button.AddItem ("All Projects"); this.popup_button.Menu.AddItem (NSMenuItem.SeparatorItem); this.popup_button.AddItems (folders); @@ -184,6 +241,8 @@ namespace SparkleShare { public override bool WindowShouldClose (NSObject sender) { (sender as SparkleEventLog).OrderOut (this); + Program.UI.UpdateDockIconVisibility (); + return false; } } @@ -194,30 +253,7 @@ namespace SparkleShare { public override void DecidePolicyForNavigation (WebView web_view, NSDictionary action_info, NSUrlRequest request, WebFrame frame, NSObject decision_token) { - string url = request.Url.ToString (); - - if (url.StartsWith (Path.VolumeSeparatorChar.ToString ())) { - string file_path = request.Url.ToString (); - file_path = file_path.Replace ("%20", " "); - - NSWorkspace.SharedWorkspace.OpenFile (file_path); - - } else { - Regex regex = new Regex (@"(.+)~(.+)~(.+)"); - Match match = regex.Match (url); - - if (match.Success) { - string folder_name = match.Groups [1].Value; - string revision = match.Groups [2].Value; - string note = match.Groups [3].Value; - - Thread thread = new Thread (new ThreadStart (delegate { - Program.Controller.AddNoteToFolder (folder_name, revision, note); - })); - - thread.Start (); - } - } + SparkleEventLogController.LinkClicked (request.Url.ToString ()); } } } diff --git a/SparkleShare/Mac/SparkleMacWatcher.cs b/SparkleShare/Mac/SparkleMacWatcher.cs index 1acf2627..75123de2 100755 --- a/SparkleShare/Mac/SparkleMacWatcher.cs +++ b/SparkleShare/Mac/SparkleMacWatcher.cs @@ -28,8 +28,9 @@ namespace SparkleShare { public delegate void ChangedEventHandler (string path); public event ChangedEventHandler Changed; - private DirectoryInfo last_changed; + private FileSystemInfo last_changed; private Thread thread; + private int poll_count = 0; public SparkleMacWatcher (string path) @@ -50,7 
+51,8 @@ namespace SparkleShare { Changed (relative_path); } - Thread.Sleep (2500); + Thread.Sleep (7500); + this.poll_count++; } })); @@ -73,7 +75,20 @@ namespace SparkleShare { } } - } catch (Exception) { } + if (this.poll_count >= 8) { + foreach (FileInfo info in parent.GetFiles ()) { + if (!info.FullName.Contains ("/.")) { + if (DateTime.Compare (info.LastWriteTime, this.last_changed.LastWriteTime) > 0) + this.last_changed = info; + } + } + + this.poll_count = 0; + } + + } catch (Exception) { + // Don't care... + } } diff --git a/SparkleShare/Mac/SparkleSetup.cs b/SparkleShare/Mac/SparkleSetup.cs index f03cc37b..a884b072 100755 --- a/SparkleShare/Mac/SparkleSetup.cs +++ b/SparkleShare/Mac/SparkleSetup.cs @@ -16,11 +16,9 @@ using System; +using System.Collections.Generic; using System.Drawing; using System.IO; -using System.Timers; - -using System.Collections.Generic; using Mono.Unix; using MonoMac.Foundation; @@ -54,7 +52,9 @@ namespace SparkleShare { private NSTextField PathLabel; private NSTextField PathHelpLabel; private NSTextField AddProjectTextField; - private Timer timer; + private NSTextField WarningTextField; + private NSImage WarningImage; + private NSImageView WarningImageView; private NSTableView TableView; private NSScrollView ScrollView; private NSTableColumn IconColumn; @@ -64,7 +64,7 @@ namespace SparkleShare { public SparkleSetup () : base () { - Controller.ChangePageEvent += delegate (PageType type) { + Controller.ChangePageEvent += delegate (PageType type, string [] warnings) { InvokeOnMainThread (delegate { Reset (); @@ -72,8 +72,8 @@ namespace SparkleShare { case PageType.Setup: { Header = "Welcome to SparkleShare!"; - Description = "Before we can create a SparkleShare folder on this " + - "computer, we need some information from you."; + Description = "We'll need some info to mark your changes in the event log. 
" + + "Don't worry, this stays between you and your peers."; FullNameLabel = new NSTextField () { @@ -87,8 +87,9 @@ namespace SparkleShare { }; FullNameTextField = new NSTextField () { - Frame = new RectangleF (330, Frame.Height - 238, 196, 22), - StringValue = Controller.GuessedUserName + Frame = new RectangleF (330, Frame.Height - 238, 196, 22), + StringValue = Controller.GuessedUserName, + Delegate = new SparkleTextFieldDelegate () }; EmailLabel = new NSTextField () { @@ -102,42 +103,45 @@ namespace SparkleShare { }; EmailTextField = new NSTextField () { - Frame = new RectangleF (330, Frame.Height - 268, 196, 22), - StringValue = Controller.GuessedUserEmail + Frame = new RectangleF (330, Frame.Height - 268, 196, 22), + StringValue = Controller.GuessedUserEmail, + Delegate = new SparkleTextFieldDelegate () }; - // TODO: Ugly hack, do properly with events - timer = new Timer () { - Interval = 50 + + (FullNameTextField.Delegate as SparkleTextFieldDelegate).StringValueChanged += delegate { + Controller.CheckSetupPage ( + FullNameTextField.StringValue, + EmailTextField.StringValue + ); }; + (EmailTextField.Delegate as SparkleTextFieldDelegate).StringValueChanged += delegate { + Controller.CheckSetupPage ( + FullNameTextField.StringValue, + EmailTextField.StringValue + ); + }; + + ContinueButton = new NSButton () { Title = "Continue", Enabled = false }; ContinueButton.Activated += delegate { - timer.Stop (); - timer = null; - string full_name = FullNameTextField.StringValue.Trim (); string email = EmailTextField.StringValue.Trim (); Controller.SetupPageCompleted (full_name, email); }; - timer.Elapsed += delegate { + Controller.UpdateSetupContinueButtonEvent += delegate (bool button_enabled) { InvokeOnMainThread (delegate { - bool name_is_valid = !FullNameTextField.StringValue.Trim ().Equals (""); - - bool email_is_valid = Program.Controller.IsValidEmail ( - EmailTextField.StringValue.Trim ()); - - ContinueButton.Enabled = (name_is_valid && email_is_valid); + ContinueButton.Enabled = button_enabled; }); }; - timer.Start (); ContentView.AddSubview (FullNameLabel); ContentView.AddSubview (FullNameTextField); @@ -146,6 +150,11 @@ namespace SparkleShare { Buttons.Add (ContinueButton); + Controller.CheckSetupPage ( + FullNameTextField.StringValue, + EmailTextField.StringValue + ); + break; } @@ -168,7 +177,8 @@ namespace SparkleShare { Frame = new RectangleF (190, Frame.Height - 336, 196, 22), Font = SparkleUI.Font, StringValue = Controller.PreviousAddress, - Enabled = (Controller.SelectedPlugin.Address == null) + Enabled = (Controller.SelectedPlugin.Address == null), + Delegate = new SparkleTextFieldDelegate () }; @@ -186,12 +196,12 @@ namespace SparkleShare { Frame = new RectangleF (190 + 196 + 16, Frame.Height - 336, 196, 22), StringValue = Controller.PreviousPath, Enabled = (Controller.SelectedPlugin.Path == null), - Bezeled = true + Delegate = new SparkleTextFieldDelegate () }; - AddressTextField.Cell.LineBreakMode = NSLineBreakMode.TruncatingTail; - PathTextField.Cell.LineBreakMode = NSLineBreakMode.TruncatingTail; + AddressTextField.Cell.LineBreakMode = NSLineBreakMode.TruncatingTail; + PathTextField.Cell.LineBreakMode = NSLineBreakMode.TruncatingTail; PathHelpLabel = new NSTextField () { @@ -210,7 +220,8 @@ namespace SparkleShare { Frame = new RectangleF (0, 0, 0, 0), RowHeight = 30, IntercellSpacing = new SizeF (0, 12), - HeaderView = null + HeaderView = null, + Delegate = new SparkleTableViewDelegate () }; ScrollView = new NSScrollView () { @@ -270,33 +281,43 @@ namespace SparkleShare { 
}); }; + TableView.SelectRow (Controller.SelectedPluginIndex, false); - timer = new Timer () { - Interval = 50 + (AddressTextField.Delegate as SparkleTextFieldDelegate).StringValueChanged += delegate { + Controller.CheckAddPage ( + AddressTextField.StringValue, + PathTextField.StringValue, + TableView.SelectedRow + ); }; - // TODO: Use an event - timer.Elapsed += delegate { - if (TableView.SelectedRow != Controller.SelectedPluginIndex) - Controller.SelectedPluginChanged (TableView.SelectedRow); + (PathTextField.Delegate as SparkleTextFieldDelegate).StringValueChanged += delegate { + Controller.CheckAddPage ( + AddressTextField.StringValue, + PathTextField.StringValue, + TableView.SelectedRow + ); + }; + (TableView.Delegate as SparkleTableViewDelegate).SelectionChanged += delegate { + Controller.SelectedPluginChanged (TableView.SelectedRow); + + Controller.CheckAddPage ( + AddressTextField.StringValue, + PathTextField.StringValue, + TableView.SelectedRow + ); + }; + + + Controller.UpdateAddProjectButtonEvent += delegate (bool button_enabled) { InvokeOnMainThread (delegate { - // TODO: Move checking logic to controller - if (!string.IsNullOrWhiteSpace (AddressTextField.StringValue) && - !string.IsNullOrWhiteSpace (PathTextField.StringValue)) { - - SyncButton.Enabled = true; - - } else { - SyncButton.Enabled = false; - } + SyncButton.Enabled = button_enabled; }); }; - timer.Start (); - ContentView.AddSubview (ScrollView); ContentView.AddSubview (AddressLabel); @@ -311,9 +332,6 @@ namespace SparkleShare { }; SyncButton.Activated += delegate { - timer.Stop (); - timer = null; - Controller.AddPageCompleted ( AddressTextField.StringValue, PathTextField.StringValue @@ -334,6 +352,13 @@ namespace SparkleShare { Buttons.Add (CancelButton); + Controller.CheckAddPage ( + AddressTextField.StringValue, + PathTextField.StringValue, + TableView.SelectedRow + ); + + break; } @@ -442,6 +467,28 @@ namespace SparkleShare { "‘" + Controller.SyncingFolder + "’ in " + "your SparkleShare folder."; + if (warnings != null) { + WarningImage = NSImage.ImageNamed ("NSCaution"); + WarningImage.Size = new SizeF (24, 24); + + WarningImageView = new NSImageView () { + Image = WarningImage, + Frame = new RectangleF (190, Frame.Height - 175, 24, 24) + }; + + WarningTextField = new NSTextField () { + Frame = new RectangleF (230, Frame.Height - 245, 325, 100), + StringValue = warnings [0], + BackgroundColor = NSColor.WindowBackground, + Bordered = false, + Editable = false, + Font = SparkleUI.Font + }; + + ContentView.AddSubview (WarningImageView); + ContentView.AddSubview (WarningTextField); + } + FinishButton = new NSButton () { Title = "Finish" }; @@ -497,7 +544,7 @@ namespace SparkleShare { }; string slide_image_path = Path.Combine (NSBundle.MainBundle.ResourcePath, - "Pixmaps", "tutorial-slide-1.png"); + "Pixmaps", "tutorial-slide-1-mac.png"); SlideImage = new NSImage (slide_image_path) { Size = new SizeF (350, 200) @@ -529,7 +576,7 @@ namespace SparkleShare { }; string slide_image_path = Path.Combine (NSBundle.MainBundle.ResourcePath, - "Pixmaps", "tutorial-slide-2.png"); + "Pixmaps", "tutorial-slide-2-mac.png"); SlideImage = new NSImage (slide_image_path) { Size = new SizeF (350, 200) @@ -560,7 +607,7 @@ namespace SparkleShare { }; string slide_image_path = Path.Combine (NSBundle.MainBundle.ResourcePath, - "Pixmaps", "tutorial-slide-3.png"); + "Pixmaps", "tutorial-slide-3-mac.png"); SlideImage = new NSImage (slide_image_path) { Size = new SizeF (350, 200) @@ -673,4 +720,39 @@ namespace SparkleShare { } } } + + + public 
class SparkleTextFieldDelegate : NSTextFieldDelegate { + + public event StringValueChangedHandler StringValueChanged; + public delegate void StringValueChangedHandler (); + + + public override void Changed (NSNotification notification) + { + if (StringValueChanged!= null) + StringValueChanged (); + } + + + public override string [] GetCompletions (NSControl control, NSTextView text_view, + string [] a, MonoMac.Foundation.NSRange range, int b) + { + return new string [0]; + } + } + + + public class SparkleTableViewDelegate : NSTableViewDelegate { + + public event SelectionChangedHandler SelectionChanged; + public delegate void SelectionChangedHandler (); + + + public override void SelectionDidChange (NSNotification notification) + { + if (SelectionChanged != null) + SelectionChanged (); + } + } } diff --git a/SparkleShare/Mac/SparkleSetupWindow.cs b/SparkleShare/Mac/SparkleSetupWindow.cs index d0a6ed79..4aa27f42 100755 --- a/SparkleShare/Mac/SparkleSetupWindow.cs +++ b/SparkleShare/Mac/SparkleSetupWindow.cs @@ -87,6 +87,7 @@ namespace SparkleShare { MakeKeyAndOrderFront (this); OrderFrontRegardless (); + Program.UI.UpdateDockIconVisibility (); } @@ -143,8 +144,10 @@ namespace SparkleShare { public override void PerformClose (NSObject sender) { - OrderOut (this); + base.OrderOut (this); NSApplication.SharedApplication.RemoveWindowsItem (this); + Program.UI.UpdateDockIconVisibility (); + return; } diff --git a/SparkleShare/Mac/SparkleShare.csproj b/SparkleShare/Mac/SparkleShare.csproj index d2f87467..f7b1d6f1 100755 --- a/SparkleShare/Mac/SparkleShare.csproj +++ b/SparkleShare/Mac/SparkleShare.csproj @@ -11,7 +11,7 @@ SparkleShare SparkleShare v4.0 - 0.2.5 + 0.6.0 true @@ -24,7 +24,7 @@ false - + @@ -58,15 +58,11 @@ False - - False - ..\..\bin\Meebey.SmartIrc4net.dll - - + + False ..\..\bin\SparkleLib.dll - @@ -83,8 +79,6 @@ Program.cs - - @@ -113,6 +107,13 @@ SparklePlugin.cs + + SparkleOptions.cs + + + + SparkleInvite.cs + @@ -277,6 +278,15 @@ Pixmaps\tutorial-slide-1.png + + Pixmaps\tutorial-slide-1-mac.png + + + Pixmaps\tutorial-slide-2-mac.png + + + Pixmaps\tutorial-slide-3-mac.png + Pixmaps\tutorial-slide-2.png diff --git a/SparkleShare/Mac/SparkleShare.sln b/SparkleShare/Mac/SparkleShare.sln old mode 100755 new mode 100644 index 98e98fe8..d22cce42 --- a/SparkleShare/Mac/SparkleShare.sln +++ b/SparkleShare/Mac/SparkleShare.sln @@ -16,6 +16,6 @@ Global EndGlobalSection GlobalSection(MonoDevelopProperties) = preSolution StartupItem = SparkleShare.csproj - version = 0.2.5 + version = 0.6.0 EndGlobalSection EndGlobal diff --git a/SparkleShare/Mac/SparkleStatusIcon.cs b/SparkleShare/Mac/SparkleStatusIcon.cs index 55563d69..d2d4a9cd 100755 --- a/SparkleShare/Mac/SparkleStatusIcon.cs +++ b/SparkleShare/Mac/SparkleStatusIcon.cs @@ -46,6 +46,7 @@ namespace SparkleShare { private NSMenuItem AboutMenuItem; private NSMenuItem NotificationsMenuItem; private NSMenuItem RecentEventsMenuItem; + private NSMenuItem QuitMenuItem; private NSImage [] AnimationFrames; private NSImage [] AnimationFramesActive; private NSImage ErrorImage; @@ -110,7 +111,10 @@ namespace SparkleShare { case IconState.Syncing: - StateText = _("Syncing…"); + StateText = _("Syncing… " + + Controller.ProgressPercentage + "% " + + Controller.ProgressSpeed); + StateMenuItem.Title = StateText; if (!Animation.Enabled) @@ -119,6 +123,8 @@ namespace SparkleShare { break; case IconState.Error: + + Animation.Stop (); StateText = _("Not everything is synced"); StateMenuItem.Title = StateText; @@ -240,7 +246,7 @@ namespace SparkleShare { 
RecentEventsMenuItem = new NSMenuItem () { Title = "Open Recent Events", - Enabled = true + Enabled = (Controller.Folders.Length > 0) }; if (Controller.Folders.Length > 0) { @@ -298,10 +304,22 @@ namespace SparkleShare { }); }; + + Menu.AddItem (AboutMenuItem); + Menu.AddItem (NSMenuItem.SeparatorItem); + + + QuitMenuItem = new NSMenuItem () { + Title = "Quit", + Enabled = true + }; - - Menu.AddItem (AboutMenuItem); - + QuitMenuItem.Activated += delegate { + Program.Controller.Quit (); + }; + + Menu.AddItem (QuitMenuItem); + StatusItem.Menu = Menu; StatusItem.Menu.Update (); } diff --git a/SparkleShare/Mac/SparkleUI.cs b/SparkleShare/Mac/SparkleUI.cs index 94dc621b..642cd6f3 100755 --- a/SparkleShare/Mac/SparkleUI.cs +++ b/SparkleShare/Mac/SparkleUI.cs @@ -37,22 +37,11 @@ namespace SparkleShare { public static SparkleBubbles Bubbles; public static SparkleAbout About; public static NSFont Font; - - private NSAlert alert; + public static NSFont BoldFont; public SparkleUI () { - string content_path = Directory.GetParent ( - System.AppDomain.CurrentDomain.BaseDirectory).ToString (); - - string app_path = Directory.GetParent (content_path).ToString (); - string growl_path = Path.Combine (app_path, "Frameworks", "Growl.framework", "Growl"); - - // Needed for Growl - Dlfcn.dlopen (growl_path, 0); - NSApplication.Init (); - // Use translations Catalog.Init ("sparkleshare", Path.Combine (NSBundle.MainBundle.ResourcePath, "Translations")); @@ -66,17 +55,14 @@ namespace SparkleShare { NSApplication.SharedApplication.ApplicationIconImage = NSImage.ImageNamed ("sparkleshare.icns"); - if (!Program.Controller.BackendIsPresent) { - this.alert = new SparkleAlert (); - this.alert.RunModal (); - return; - } - SetFolderIcon (); Font = NSFontManager.SharedFontManager.FontWithFamily ("Lucida Grande", NSFontTraitMask.Condensed, 0, 13); + BoldFont = NSFontManager.SharedFontManager.FontWithFamily + ("Lucida Grande", NSFontTraitMask.Bold, 0, 13); + StatusIcon = new SparkleStatusIcon (); Bubbles = new SparkleBubbles (); @@ -106,6 +92,29 @@ namespace SparkleShare { } + public void UpdateDockIconVisibility () + { + if (true) { // TODO: check for open windows + + ShowDockIcon (); + + } else { + HideDockIcon (); + } + } + + + private void HideDockIcon () { + // Currently not supported, here for completeness sake (see Apple's docs) + // NSApplication.SharedApplication.ActivationPolicy = NSApplicationActivationPolicy.None; + } + + + private void ShowDockIcon () { + NSApplication.SharedApplication.ActivationPolicy = NSApplicationActivationPolicy.Regular; + } + + [Export("registrationDictionaryForGrowl")] NSDictionary RegistrationDictionaryForGrowl () { @@ -122,6 +131,7 @@ namespace SparkleShare { NSApplication.SharedApplication.DockTile.BadgeLabel = null; } + public override void WillTerminate (NSNotification notification) { Program.Controller.Quit (); diff --git a/SparkleShare/Mac/git/LICENSE b/SparkleShare/Mac/git/LICENSE new file mode 100644 index 00000000..fe81dd87 --- /dev/null +++ b/SparkleShare/Mac/git/LICENSE @@ -0,0 +1,370 @@ +From https://github.com/gitster/git: + +//////////////////////////////////////////////////////////////// + + GIT - the stupid content tracker + +//////////////////////////////////////////////////////////////// + +"git" can mean anything, depending on your mood. + + - random three-letter combination that is pronounceable, and not + actually used by any common UNIX command. The fact that it is a + mispronunciation of "get" may or may not be relevant. + - stupid. contemptible and despicable. 
simple. Take your pick from the + dictionary of slang. + - "global information tracker": you're in a good mood, and it actually + works for you. Angels sing, and a light suddenly fills the room. + - "goddamn idiotic truckload of sh*t": when it breaks + +Git is a fast, scalable, distributed revision control system with an +unusually rich command set that provides both high-level operations +and full access to internals. + +Git is an Open Source project covered by the GNU General Public License. +It was originally written by Linus Torvalds with help of a group of +hackers around the net. It is currently maintained by Junio C Hamano. + +//////////////////////////////////////////////////////////////// + + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. 
+ + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. 
These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. 
If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. 
+ +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. + diff --git a/SparkleShare/Mac/git/bin/git b/SparkleShare/Mac/git/bin/git new file mode 100755 index 00000000..0e604ec9 Binary files /dev/null and b/SparkleShare/Mac/git/bin/git differ diff --git a/SparkleShare/Mac/git/bin/git-cvsserver b/SparkleShare/Mac/git/bin/git-cvsserver new file mode 100755 index 00000000..cafb11f4 --- /dev/null +++ b/SparkleShare/Mac/git/bin/git-cvsserver @@ -0,0 +1,3696 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); + +#### +#### This application is a CVS emulation layer for git. +#### It is intended for clients to connect over SSH. +#### See the documentation for more details. +#### +#### Copyright The Open University UK - 2006. +#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### +#### Released under the GNU Public License, version 2. 
+#### +#### + +use 5.008; +use strict; +use warnings; +use bytes; + +use Fcntl; +use File::Temp qw/tempdir tempfile/; +use File::Path qw/rmtree/; +use File::Basename; +use Getopt::Long qw(:config require_order no_ignore_case); + +my $VERSION = '1.7.6.1'; + +my $log = GITCVS::log->new(); +my $cfg; + +my $DATE_LIST = { + Jan => "01", + Feb => "02", + Mar => "03", + Apr => "04", + May => "05", + Jun => "06", + Jul => "07", + Aug => "08", + Sep => "09", + Oct => "10", + Nov => "11", + Dec => "12", +}; + +# Enable autoflush for STDOUT (otherwise the whole thing falls apart) +$| = 1; + +#### Definition and mappings of functions #### + +my $methods = { + 'Root' => \&req_Root, + 'Valid-responses' => \&req_Validresponses, + 'valid-requests' => \&req_validrequests, + 'Directory' => \&req_Directory, + 'Entry' => \&req_Entry, + 'Modified' => \&req_Modified, + 'Unchanged' => \&req_Unchanged, + 'Questionable' => \&req_Questionable, + 'Argument' => \&req_Argument, + 'Argumentx' => \&req_Argument, + 'expand-modules' => \&req_expandmodules, + 'add' => \&req_add, + 'remove' => \&req_remove, + 'co' => \&req_co, + 'update' => \&req_update, + 'ci' => \&req_ci, + 'diff' => \&req_diff, + 'log' => \&req_log, + 'rlog' => \&req_log, + 'tag' => \&req_CATCHALL, + 'status' => \&req_status, + 'admin' => \&req_CATCHALL, + 'history' => \&req_CATCHALL, + 'watchers' => \&req_EMPTY, + 'editors' => \&req_EMPTY, + 'noop' => \&req_EMPTY, + 'annotate' => \&req_annotate, + 'Global_option' => \&req_Globaloption, + #'annotate' => \&req_CATCHALL, +}; + +############################################## + + +# $state holds all the bits of information the clients sends us that could +# potentially be useful when it comes to actually _doing_ something. +my $state = { prependdir => '' }; + +# Work is for managing temporary working directory +my $work = + { + state => undef, # undef, 1 (empty), 2 (with stuff) + workDir => undef, + index => undef, + emptyDir => undef, + tmpDir => undef + }; + +$log->info("--------------- STARTING -----------------"); + +my $usage = + "Usage: git cvsserver [options] [pserver|server] [ ...]\n". + " --base-path : Prepend to requested CVSROOT\n". + " Can be read from GIT_CVSSERVER_BASE_PATH\n". + " --strict-paths : Don't allow recursing into subdirectories\n". + " --export-all : Don't check for gitcvs.enabled in config\n". + " --version, -V : Print version information and exit\n". + " --help, -h, -H : Print usage information and exit\n". + "\n". + " ... is a list of allowed directories. If no directories\n". + "are given, all are allowed. This is an additional restriction, gitcvs\n". + "access still needs to be enabled by the gitcvs.enabled config option.\n". 
+ "Alternately, one directory may be specified in GIT_CVSSERVER_ROOT.\n"; + +my @opts = ( 'help|h|H', 'version|V', + 'base-path=s', 'strict-paths', 'export-all' ); +GetOptions( $state, @opts ) + or die $usage; + +if ($state->{version}) { + print "git-cvsserver version $VERSION\n"; + exit; +} +if ($state->{help}) { + print $usage; + exit; +} + +my $TEMP_DIR = tempdir( CLEANUP => 1 ); +$log->debug("Temporary directory is '$TEMP_DIR'"); + +$state->{method} = 'ext'; +if (@ARGV) { + if ($ARGV[0] eq 'pserver') { + $state->{method} = 'pserver'; + shift @ARGV; + } elsif ($ARGV[0] eq 'server') { + shift @ARGV; + } +} + +# everything else is a directory +$state->{allowed_roots} = [ @ARGV ]; + +# don't export the whole system unless the users requests it +if ($state->{'export-all'} && !@{$state->{allowed_roots}}) { + die "--export-all can only be used together with an explicit whitelist\n"; +} + +# Environment handling for running under git-shell +if (exists $ENV{GIT_CVSSERVER_BASE_PATH}) { + if ($state->{'base-path'}) { + die "Cannot specify base path both ways.\n"; + } + my $base_path = $ENV{GIT_CVSSERVER_BASE_PATH}; + $state->{'base-path'} = $base_path; + $log->debug("Picked up base path '$base_path' from environment.\n"); +} +if (exists $ENV{GIT_CVSSERVER_ROOT}) { + if (@{$state->{allowed_roots}}) { + die "Cannot specify roots both ways: @ARGV\n"; + } + my $allowed_root = $ENV{GIT_CVSSERVER_ROOT}; + $state->{allowed_roots} = [ $allowed_root ]; + $log->debug("Picked up allowed root '$allowed_root' from environment.\n"); +} + +# if we are called with a pserver argument, +# deal with the authentication cat before entering the +# main loop +if ($state->{method} eq 'pserver') { + my $line = ; chomp $line; + unless( $line =~ /^BEGIN (AUTH|VERIFICATION) REQUEST$/) { + die "E Do not understand $line - expecting BEGIN AUTH REQUEST\n"; + } + my $request = $1; + $line = ; chomp $line; + unless (req_Root('root', $line)) { # reuse Root + print "E Invalid root $line \n"; + exit 1; + } + $line = ; chomp $line; + my $user = $line; + $line = ; chomp $line; + my $password = $line; + + if ($user eq 'anonymous') { + # "A" will be 1 byte, use length instead in case the + # encryption method ever changes (yeah, right!) + if (length($password) > 1 ) { + print "E Don't supply a password for the `anonymous' user\n"; + print "I HATE YOU\n"; + exit 1; + } + + # Fall through to LOVE + } else { + # Trying to authenticate a user + if (not exists $cfg->{gitcvs}->{authdb}) { + print "E the repo config file needs a [gitcvs] section with an 'authdb' parameter set to the filename of the authentication database\n"; + print "I HATE YOU\n"; + exit 1; + } + + my $authdb = $cfg->{gitcvs}->{authdb}; + + unless (-e $authdb) { + print "E The authentication database specified in [gitcvs.authdb] does not exist\n"; + print "I HATE YOU\n"; + exit 1; + } + + my $auth_ok; + open my $passwd, "<", $authdb or die $!; + while (<$passwd>) { + if (m{^\Q$user\E:(.*)}) { + if (crypt($user, descramble($password)) eq $1) { + $auth_ok = 1; + } + }; + } + close $passwd; + + unless ($auth_ok) { + print "I HATE YOU\n"; + exit 1; + } + + # Fall through to LOVE + } + + # For checking whether the user is anonymous on commit + $state->{user} = $user; + + $line = ; chomp $line; + unless ($line eq "END $request REQUEST") { + die "E Do not understand $line -- expecting END $request REQUEST\n"; + } + print "I LOVE YOU\n"; + exit if $request eq 'VERIFICATION'; # cvs login + # and now back to our regular programme... 
+}
+
+# Keep going until the client closes the connection
+while (<STDIN>)
+{
+    chomp;
+
+    # Check to see if we've seen this method, and call appropriate function.
+    if ( /^([\w-]+)(?:\s+(.*))?$/ and defined($methods->{$1}) )
+    {
+        # use the $methods hash to call the appropriate sub for this command
+        #$log->info("Method : $1");
+        &{$methods->{$1}}($1,$2);
+    } else {
+        # log fatal because we don't understand this function. If this happens
+        # we're fairly screwed because we don't know if the client is expecting
+        # a response. If it is, the client will hang, we'll hang, and the whole
+        # thing will be custard.
+        $log->fatal("Don't understand command $_\n");
+        die("Unknown command $_");
+    }
+}
+
+$log->debug("Processing time : user=" . (times)[0] . " system=" . (times)[1]);
+$log->info("--------------- FINISH -----------------");
+
+chdir '/';
+exit 0;
+
+# Magic catchall method.
+# This is the method that will handle all commands we haven't yet
+# implemented. It simply sends a warning to the log file indicating a
+# command that hasn't been implemented has been invoked.
+sub req_CATCHALL
+{
+    my ( $cmd, $data ) = @_;
+    $log->warn("Unhandled command : req_$cmd : $data");
+}
+
+# This method invariably succeeds with an empty response.
+sub req_EMPTY
+{
+    print "ok\n";
+}
+
+# Root pathname \n
+# Response expected: no. Tell the server which CVSROOT to use. Note that
+# pathname is a local directory and not a fully qualified CVSROOT variable.
+# pathname must already exist; if creating a new root, use the init
+# request, not Root. pathname does not include the hostname of the server,
+# how to access the server, etc.; by the time the CVS protocol is in use,
+# connection, authentication, etc., are already taken care of. The Root
+# request must be sent only once, and it must be sent before any requests
+# other than Valid-responses, valid-requests, UseUnchanged, Set or init.
+sub req_Root
+{
+    my ( $cmd, $data ) = @_;
+    $log->debug("req_Root : $data");
+
+    unless ($data =~ m#^/#) {
+        print "error 1 Root must be an absolute pathname\n";
+        return 0;
+    }
+
+    my $cvsroot = $state->{'base-path'} || '';
+    $cvsroot =~ s#/+$##;
+    $cvsroot .= $data;
+
+    if ($state->{CVSROOT}
+        && ($state->{CVSROOT} ne $cvsroot)) {
+        print "error 1 Conflicting roots specified\n";
+        return 0;
+    }
+
+    $state->{CVSROOT} = $cvsroot;
+
+    $ENV{GIT_DIR} = $state->{CVSROOT} . "/";
+
+    if (@{$state->{allowed_roots}}) {
+        my $allowed = 0;
+        foreach my $dir (@{$state->{allowed_roots}}) {
+            next unless $dir =~ m#^/#;
+            $dir =~ s#/+$##;
+            if ($state->{'strict-paths'}) {
+                if ($ENV{GIT_DIR} =~ m#^\Q$dir\E/?$#) {
+                    $allowed = 1;
+                    last;
+                }
+            } elsif ($ENV{GIT_DIR} =~ m#^\Q$dir\E(/?$|/)#) {
+                $allowed = 1;
+                last;
+            }
+        }
+
+        unless ($allowed) {
+            print "E $ENV{GIT_DIR} does not seem to be a valid GIT repository\n";
+            print "E \n";
+            print "error 1 $ENV{GIT_DIR} is not a valid repository\n";
+            return 0;
+        }
+    }
+
+    unless (-d $ENV{GIT_DIR} && -e $ENV{GIT_DIR}.'HEAD') {
+        print "E $ENV{GIT_DIR} does not seem to be a valid GIT repository\n";
+        print "E \n";
+        print "error 1 $ENV{GIT_DIR} is not a valid repository\n";
+        return 0;
+    }
+
+    my @gitvars = `git config -l`;
+    if ($?) {
{ + print "E problems executing git-config on the server -- this is not a git repository or the PATH is not set correctly.\n"; + print "E \n"; + print "error 1 - problem executing git-config\n"; + return 0; + } + foreach my $line ( @gitvars ) + { + next unless ( $line =~ /^(gitcvs)\.(?:(ext|pserver)\.)?([\w-]+)=(.*)$/ ); + unless ($2) { + $cfg->{$1}{$3} = $4; + } else { + $cfg->{$1}{$2}{$3} = $4; + } + } + + my $enabled = ($cfg->{gitcvs}{$state->{method}}{enabled} + || $cfg->{gitcvs}{enabled}); + unless ($state->{'export-all'} || + ($enabled && $enabled =~ /^\s*(1|true|yes)\s*$/i)) { + print "E GITCVS emulation needs to be enabled on this repo\n"; + print "E the repo config file needs a [gitcvs] section added, and the parameter 'enabled' set to 1\n"; + print "E \n"; + print "error 1 GITCVS emulation disabled\n"; + return 0; + } + + my $logfile = $cfg->{gitcvs}{$state->{method}}{logfile} || $cfg->{gitcvs}{logfile}; + if ( $logfile ) + { + $log->setfile($logfile); + } else { + $log->nofile(); + } + + return 1; +} + +# Global_option option \n +# Response expected: no. Transmit one of the global options `-q', `-Q', +# `-l', `-t', `-r', or `-n'. option must be one of those strings, no +# variations (such as combining of options) are allowed. For graceful +# handling of valid-requests, it is probably better to make new global +# options separate requests, rather than trying to add them to this +# request. +sub req_Globaloption +{ + my ( $cmd, $data ) = @_; + $log->debug("req_Globaloption : $data"); + $state->{globaloptions}{$data} = 1; +} + +# Valid-responses request-list \n +# Response expected: no. Tell the server what responses the client will +# accept. request-list is a space separated list of tokens. +sub req_Validresponses +{ + my ( $cmd, $data ) = @_; + $log->debug("req_Validresponses : $data"); + + # TODO : re-enable this, currently it's not particularly useful + #$state->{validresponses} = [ split /\s+/, $data ]; +} + +# valid-requests \n +# Response expected: yes. Ask the server to send back a Valid-requests +# response. +sub req_validrequests +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_validrequests"); + + $log->debug("SEND : Valid-requests " . join(" ",keys %$methods)); + $log->debug("SEND : ok"); + + print "Valid-requests " . join(" ",keys %$methods) . "\n"; + print "ok\n"; +} + +# Directory local-directory \n +# Additional data: repository \n. Response expected: no. Tell the server +# what directory to use. The repository should be a directory name from a +# previous server response. Note that this both gives a default for Entry +# and Modified and also for ci and the other commands; normal usage is to +# send Directory for each directory in which there will be an Entry or +# Modified, and then a final Directory for the original directory, then the +# command. The local-directory is relative to the top level at which the +# command is occurring (i.e. the last Directory which is sent before the +# command); to indicate that top level, `.' should be sent for +# local-directory. +sub req_Directory +{ + my ( $cmd, $data ) = @_; + + my $repository = ; + chomp $repository; + + + $state->{localdir} = $data; + $state->{repository} = $repository; + $state->{path} = $repository; + $state->{path} =~ s/^\Q$state->{CVSROOT}\E\///; + $state->{module} = $1 if ($state->{path} =~ s/^(.*?)(\/|$)//); + $state->{path} .= "/" if ( $state->{path} =~ /\S/ ); + + $state->{directory} = $state->{localdir}; + $state->{directory} = "" if ( $state->{directory} eq "." 
); + $state->{directory} .= "/" if ( $state->{directory} =~ /\S/ ); + + if ( (not defined($state->{prependdir}) or $state->{prependdir} eq '') and $state->{localdir} eq "." and $state->{path} =~ /\S/ ) + { + $log->info("Setting prepend to '$state->{path}'"); + $state->{prependdir} = $state->{path}; + foreach my $entry ( keys %{$state->{entries}} ) + { + $state->{entries}{$state->{prependdir} . $entry} = $state->{entries}{$entry}; + delete $state->{entries}{$entry}; + } + } + + if ( defined ( $state->{prependdir} ) ) + { + $log->debug("Prepending '$state->{prependdir}' to state|directory"); + $state->{directory} = $state->{prependdir} . $state->{directory} + } + $log->debug("req_Directory : localdir=$data repository=$repository path=$state->{path} directory=$state->{directory} module=$state->{module}"); +} + +# Entry entry-line \n +# Response expected: no. Tell the server what version of a file is on the +# local machine. The name in entry-line is a name relative to the directory +# most recently specified with Directory. If the user is operating on only +# some files in a directory, Entry requests for only those files need be +# included. If an Entry request is sent without Modified, Is-modified, or +# Unchanged, it means the file is lost (does not exist in the working +# directory). If both Entry and one of Modified, Is-modified, or Unchanged +# are sent for the same file, Entry must be sent first. For a given file, +# one can send Modified, Is-modified, or Unchanged, but not more than one +# of these three. +sub req_Entry +{ + my ( $cmd, $data ) = @_; + + #$log->debug("req_Entry : $data"); + + my @data = split(/\//, $data); + + $state->{entries}{$state->{directory}.$data[1]} = { + revision => $data[2], + conflict => $data[3], + options => $data[4], + tag_or_date => $data[5], + }; + + $log->info("Received entry line '$data' => '" . $state->{directory} . $data[1] . "'"); +} + +# Questionable filename \n +# Response expected: no. Additional data: no. Tell the server to check +# whether filename should be ignored, and if not, next time the server +# sends responses, send (in a M response) `?' followed by the directory and +# filename. filename must not contain `/'; it needs to be a file in the +# directory named by the most recent Directory request. +sub req_Questionable +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_Questionable : $data"); + $state->{entries}{$state->{directory}.$data}{questionable} = 1; +} + +# add \n +# Response expected: yes. Add a file or directory. This uses any previous +# Argument, Directory, Entry, or Modified requests, if they have been sent. +# The last Directory sent specifies the working directory at the time of +# the operation. To add a directory, send the directory to be added using +# Directory and Argument requests. 
+sub req_add +{ + my ( $cmd, $data ) = @_; + + argsplit("add"); + + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + argsfromdir($updater); + + my $addcount = 0; + + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + my $wrev = revparse($filename); + + if ($wrev && $meta && ($wrev < 0)) + { + # previously removed file, add back + $log->info("added file $filename was previously removed, send 1.$meta->{revision}"); + + print "MT +updated\n"; + print "MT text U \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + print "MT -updated\n"; + + unless ( $state->{globaloptions}{-n} ) + { + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + print "Created $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + # transmit file + transmitfile($meta->{filehash}); + } + + next; + } + + unless ( defined ( $state->{entries}{$filename}{modified_filename} ) ) + { + print "E cvs add: nothing known about `$filename'\n"; + next; + } + # TODO : check we're not squashing an already existing file + if ( defined ( $state->{entries}{$filename}{revision} ) ) + { + print "E cvs add: `$filename' has already been entered\n"; + next; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename, 1); + + print "E cvs add: scheduling file `$filename' for addition\n"; + + print "Checked-in $dirpart\n"; + print "$filename\n"; + my $kopts = kopts_from_path($filename,"file", + $state->{entries}{$filename}{modified_filename}); + print "/$filepart/0//$kopts/\n"; + + my $requestedKopts = $state->{opt}{k}; + if(defined($requestedKopts)) + { + $requestedKopts = "-k$requestedKopts"; + } + else + { + $requestedKopts = ""; + } + if( $kopts ne $requestedKopts ) + { + $log->warn("Ignoring requested -k='$requestedKopts'" + . " for '$filename'; detected -k='$kopts' instead"); + #TODO: Also have option to send warning to user? + } + + $addcount++; + } + + if ( $addcount == 1 ) + { + print "E cvs add: use `cvs commit' to add this file permanently\n"; + } + elsif ( $addcount > 1 ) + { + print "E cvs add: use `cvs commit' to add these files permanently\n"; + } + + print "ok\n"; +} + +# remove \n +# Response expected: yes. Remove a file. This uses any previous Argument, +# Directory, Entry, or Modified requests, if they have been sent. The last +# Directory sent specifies the working directory at the time of the +# operation. Note that this request does not actually do anything to the +# repository; the only effect of a successful remove request is to supply +# the client with a new entries line containing `-' to indicate a removed +# file. In fact, the client probably could perform this operation without +# contacting the server, although using remove may cause the server to +# perform a few more checks. The client sends a subsequent ci request to +# actually record the removal in the repository. 
+sub req_remove
+{
+    my ( $cmd, $data ) = @_;
+
+    argsplit("remove");
+
+    # Grab a handle to the SQLite db and do any necessary updates
+    my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log);
+    $updater->update();
+
+    #$log->debug("add state : " . Dumper($state));
+
+    my $rmcount = 0;
+
+    foreach my $filename ( @{$state->{args}} )
+    {
+        $filename = filecleanup($filename);
+
+        if ( defined ( $state->{entries}{$filename}{unchanged} ) or defined ( $state->{entries}{$filename}{modified_filename} ) )
+        {
+            print "E cvs remove: file `$filename' still in working directory\n";
+            next;
+        }
+
+        my $meta = $updater->getmeta($filename);
+        my $wrev = revparse($filename);
+
+        unless ( defined ( $wrev ) )
+        {
+            print "E cvs remove: nothing known about `$filename'\n";
+            next;
+        }
+
+        if ( defined($wrev) and $wrev < 0 )
+        {
+            print "E cvs remove: file `$filename' already scheduled for removal\n";
+            next;
+        }
+
+        unless ( $wrev == $meta->{revision} )
+        {
+            # TODO : not sure if the format of this message is quite correct.
+            print "E cvs remove: Up to date check failed for `$filename'\n";
+            next;
+        }
+
+
+        my ( $filepart, $dirpart ) = filenamesplit($filename, 1);
+
+        print "E cvs remove: scheduling `$filename' for removal\n";
+
+        print "Checked-in $dirpart\n";
+        print "$filename\n";
+        my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash});
+        print "/$filepart/-1.$wrev//$kopts/\n";
+
+        $rmcount++;
+    }
+
+    if ( $rmcount == 1 )
+    {
+        print "E cvs remove: use `cvs commit' to remove this file permanently\n";
+    }
+    elsif ( $rmcount > 1 )
+    {
+        print "E cvs remove: use `cvs commit' to remove these files permanently\n";
+    }
+
+    print "ok\n";
+}
+
+# Modified filename \n
+# Response expected: no. Additional data: mode, \n, file transmission. Send
+# the server a copy of one locally modified file. filename is a file within
+# the most recent directory sent with Directory; it must not contain `/'.
+# If the user is operating on only some files in a directory, only those
+# files need to be included. This can also be sent without Entry, if there
+# is no entry for the file.
+sub req_Modified
+{
+    my ( $cmd, $data ) = @_;
+
+    my $mode = <STDIN>;
+    defined $mode
+        or (print "E end of file reading mode for $data\n"), return;
+    chomp $mode;
+    my $size = <STDIN>;
+    defined $size
+        or (print "E end of file reading size of $data\n"), return;
+    chomp $size;
+
+    # Grab config information
+    my $blocksize = 8192;
+    my $bytesleft = $size;
+    my $tmp;
+
+    # Get a filehandle/name to write it to
+    my ( $fh, $filename ) = tempfile( DIR => $TEMP_DIR );
+
+    # Loop over file data writing out to temporary file.
+    while ( $bytesleft )
+    {
+        $blocksize = $bytesleft if ( $bytesleft < $blocksize );
+        read STDIN, $tmp, $blocksize;
+        print $fh $tmp;
+        $bytesleft -= $blocksize;
+    }
+
+    close $fh
+        or (print "E failed to write temporary, $filename: $!\n"), return;
+
+    # Ensure we have something sensible for the file mode
+    if ( $mode =~ /u=(\w+)/ )
+    {
+        $mode = $1;
+    } else {
+        $mode = "rw";
+    }
+
+    # Save the file data in $state
+    $state->{entries}{$state->{directory}.$data}{modified_filename} = $filename;
+    $state->{entries}{$state->{directory}.$data}{modified_mode} = $mode;
+    $state->{entries}{$state->{directory}.$data}{modified_hash} = `git hash-object $filename`;
+    $state->{entries}{$state->{directory}.$data}{modified_hash} =~ s/\s.*$//s;
+
+    #$log->debug("req_Modified : file=$data mode=$mode size=$size");
+}
+
+# Unchanged filename \n
+# Response expected: no.
Tell the server that filename has not been +# modified in the checked out directory. The filename is a file within the +# most recent directory sent with Directory; it must not contain `/'. +sub req_Unchanged +{ + my ( $cmd, $data ) = @_; + + $state->{entries}{$state->{directory}.$data}{unchanged} = 1; + + #$log->debug("req_Unchanged : $data"); +} + +# Argument text \n +# Response expected: no. Save argument for use in a subsequent command. +# Arguments accumulate until an argument-using command is given, at which +# point they are forgotten. +# Argumentx text \n +# Response expected: no. Append \n followed by text to the current argument +# being saved. +sub req_Argument +{ + my ( $cmd, $data ) = @_; + + # Argumentx means: append to last Argument (with a newline in front) + + $log->debug("$cmd : $data"); + + if ( $cmd eq 'Argumentx') { + ${$state->{arguments}}[$#{$state->{arguments}}] .= "\n" . $data; + } else { + push @{$state->{arguments}}, $data; + } +} + +# expand-modules \n +# Response expected: yes. Expand the modules which are specified in the +# arguments. Returns the data in Module-expansion responses. Note that the +# server can assume that this is checkout or export, not rtag or rdiff; the +# latter do not access the working directory and thus have no need to +# expand modules on the client side. Expand may not be the best word for +# what this request does. It does not necessarily tell you all the files +# contained in a module, for example. Basically it is a way of telling you +# which working directories the server needs to know about in order to +# handle a checkout of the specified modules. For example, suppose that the +# server has a module defined by +# aliasmodule -a 1dir +# That is, one can check out aliasmodule and it will take 1dir in the +# repository and check it out to 1dir in the working directory. Now suppose +# the client already has this module checked out and is planning on using +# the co request to update it. Without using expand-modules, the client +# would have two bad choices: it could either send information about all +# working directories under the current directory, which could be +# unnecessarily slow, or it could be ignorant of the fact that aliasmodule +# stands for 1dir, and neglect to send information for 1dir, which would +# lead to incorrect operation. With expand-modules, the client would first +# ask for the module to be expanded: +sub req_expandmodules +{ + my ( $cmd, $data ) = @_; + + argsplit(); + + $log->debug("req_expandmodules : " . ( defined($data) ? $data : "[NULL]" ) ); + + unless ( ref $state->{arguments} eq "ARRAY" ) + { + print "ok\n"; + return; + } + + foreach my $module ( @{$state->{arguments}} ) + { + $log->debug("SEND : Module-expansion $module"); + print "Module-expansion $module\n"; + } + + print "ok\n"; + statecleanup(); +} + +# co \n +# Response expected: yes. Get files from the repository. This uses any +# previous Argument, Directory, Entry, or Modified requests, if they have +# been sent. Arguments to this command are module names; the client cannot +# know what directories they correspond to except by (1) just sending the +# co request, and then seeing what directory names the server sends back in +# its responses, and (2) the expand-modules request. +sub req_co +{ + my ( $cmd, $data ) = @_; + + argsplit("co"); + + # Provide list of modules, if -c was used. 
+ if (exists $state->{opt}{c}) { + my $showref = `git show-ref --heads`; + for my $line (split '\n', $showref) { + if ( $line =~ m% refs/heads/(.*)$% ) { + print "M $1\t$1\n"; + } + } + print "ok\n"; + return 1; + } + + my $module = $state->{args}[0]; + $state->{module} = $module; + my $checkout_path = $module; + + # use the user specified directory if we're given it + $checkout_path = $state->{opt}{d} if ( exists ( $state->{opt}{d} ) ); + + $log->debug("req_co : " . ( defined($data) ? $data : "[NULL]" ) ); + + $log->info("Checking out module '$module' ($state->{CVSROOT}) to '$checkout_path'"); + + $ENV{GIT_DIR} = $state->{CVSROOT} . "/"; + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $module, $log); + $updater->update(); + + $checkout_path =~ s|/$||; # get rid of trailing slashes + + # Eclipse seems to need the Clear-sticky command + # to prepare the 'Entries' file for the new directory. + print "Clear-sticky $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "Clear-static-directory $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "Clear-sticky $checkout_path/\n"; # yes, twice + print $state->{CVSROOT} . "/$module/\n"; + print "Template $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "0\n"; + + # instruct the client that we're checking out to $checkout_path + print "E cvs checkout: Updating $checkout_path\n"; + + my %seendirs = (); + my $lastdir =''; + + # recursive + sub prepdir { + my ($dir, $repodir, $remotedir, $seendirs) = @_; + my $parent = dirname($dir); + $dir =~ s|/+$||; + $repodir =~ s|/+$||; + $remotedir =~ s|/+$||; + $parent =~ s|/+$||; + $log->debug("announcedir $dir, $repodir, $remotedir" ); + + if ($parent eq '.' || $parent eq './') { + $parent = ''; + } + # recurse to announce unseen parents first + if (length($parent) && !exists($seendirs->{$parent})) { + prepdir($parent, $repodir, $remotedir, $seendirs); + } + # Announce that we are going to modify at the parent level + if ($parent) { + print "E cvs checkout: Updating $remotedir/$parent\n"; + } else { + print "E cvs checkout: Updating $remotedir\n"; + } + print "Clear-sticky $remotedir/$parent/\n"; + print "$repodir/$parent/\n"; + + print "Clear-static-directory $remotedir/$dir/\n"; + print "$repodir/$dir/\n"; + print "Clear-sticky $remotedir/$parent/\n"; # yes, twice + print "$repodir/$parent/\n"; + print "Template $remotedir/$dir/\n"; + print "$repodir/$dir/\n"; + print "0\n"; + + $seendirs->{$dir} = 1; + } + + foreach my $git ( @{$updater->gethead} ) + { + # Don't want to check out deleted files + next if ( $git->{filehash} eq "deleted" ); + + my $fullName = $git->{name}; + ( $git->{name}, $git->{dir} ) = filenamesplit($git->{name}); + + if (length($git->{dir}) && $git->{dir} ne './' + && $git->{dir} ne $lastdir ) { + unless (exists($seendirs{$git->{dir}})) { + prepdir($git->{dir}, $state->{CVSROOT} . "/$module/", + $checkout_path, \%seendirs); + $lastdir = $git->{dir}; + $seendirs{$git->{dir}} = 1; + } + print "E cvs checkout: Updating /$checkout_path/$git->{dir}\n"; + } + + # modification time of this file + print "Mod-time $git->{modified}\n"; + + # print some information to the client + if ( defined ( $git->{dir} ) and $git->{dir} ne "./" ) + { + print "M U $checkout_path/$git->{dir}$git->{name}\n"; + } else { + print "M U $checkout_path/$git->{name}\n"; + } + + # instruct client we're sending a file to put in this path + print "Created $checkout_path/" . 
( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "\n"; + + print $state->{CVSROOT} . "/$module/" . ( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "$git->{name}\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($fullName,"sha1",$git->{filehash}); + print "/$git->{name}/1.$git->{revision}//$kopts/\n"; + # permissions + print "u=$git->{mode},g=$git->{mode},o=$git->{mode}\n"; + + # transmit file + transmitfile($git->{filehash}); + } + + print "ok\n"; + + statecleanup(); +} + +# update \n +# Response expected: yes. Actually do a cvs update command. This uses any +# previous Argument, Directory, Entry, or Modified requests, if they have +# been sent. The last Directory sent specifies the working directory at the +# time of the operation. The -I option is not used--files which the client +# can decide whether to ignore are not mentioned and the client sends the +# Questionable request for others. +sub req_update +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_update : " . ( defined($data) ? $data : "[NULL]" )); + + argsplit("update"); + + # + # It may just be a client exploring the available heads/modules + # in that case, list them as top level directories and leave it + # at that. Eclipse uses this technique to offer you a list of + # projects (heads in this case) to checkout. + # + if ($state->{module} eq '') { + my $showref = `git show-ref --heads`; + print "E cvs update: Updating .\n"; + for my $line (split '\n', $showref) { + if ( $line =~ m% refs/heads/(.*)$% ) { + print "E cvs update: New directory `$1'\n"; + } + } + print "ok\n"; + return 1; + } + + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + + $updater->update(); + + argsfromdir($updater); + + #$log->debug("update state : " . Dumper($state)); + + my $last_dirname = "///"; + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + $log->debug("Processing file $filename"); + + unless ( $state->{globaloptions}{-Q} || $state->{globaloptions}{-q} ) + { + my $cur_dirname = dirname($filename); + if ( $cur_dirname ne $last_dirname ) + { + $last_dirname = $cur_dirname; + if ( $cur_dirname eq "" ) + { + $cur_dirname = "."; + } + print "E cvs update: Updating $cur_dirname\n"; + } + } + + # if we have a -C we should pretend we never saw modified stuff + if ( exists ( $state->{opt}{C} ) ) + { + delete $state->{entries}{$filename}{modified_hash}; + delete $state->{entries}{$filename}{modified_filename}; + $state->{entries}{$filename}{unchanged} = 1; + } + + my $meta; + if ( defined($state->{opt}{r}) and $state->{opt}{r} =~ /^1\.(\d+)/ ) + { + $meta = $updater->getmeta($filename, $1); + } else { + $meta = $updater->getmeta($filename); + } + + # If -p was given, "print" the contents of the requested revision. + if ( exists ( $state->{opt}{p} ) ) { + if ( defined ( $meta->{revision} ) ) { + $log->info("Printing '$filename' revision " . $meta->{revision}); + + transmitfile($meta->{filehash}, { print => 1 }); + } + + next; + } + + if ( ! defined $meta ) + { + $meta = { + name => $filename, + revision => 0, + filehash => 'added' + }; + } + + my $oldmeta = $meta; + + my $wrev = revparse($filename); + + # If the working copy is an old revision, lets get that version too for comparison. 
+ if ( defined($wrev) and $wrev != $meta->{revision} ) + { + $oldmeta = $updater->getmeta($filename, $wrev); + } + + #$log->debug("Target revision is $meta->{revision}, current working revision is $wrev"); + + # Files are up to date if the working copy and repo copy have the same revision, + # and the working copy is unmodified _and_ the user hasn't specified -C + next if ( defined ( $wrev ) + and defined($meta->{revision}) + and $wrev == $meta->{revision} + and $state->{entries}{$filename}{unchanged} + and not exists ( $state->{opt}{C} ) ); + + # If the working copy and repo copy have the same revision, + # but the working copy is modified, tell the client it's modified + if ( defined ( $wrev ) + and defined($meta->{revision}) + and $wrev == $meta->{revision} + and defined($state->{entries}{$filename}{modified_hash}) + and not exists ( $state->{opt}{C} ) ) + { + $log->info("Tell the client the file is modified"); + print "MT text M \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + } + + if ( $meta->{filehash} eq "deleted" ) + { + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + $log->info("Removing '$filename' from working copy (no longer in the repo)"); + + print "E cvs update: `$filename' is no longer in the repository\n"; + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) { + print "Removed $dirpart\n"; + print "$filepart\n"; + } + } + elsif ( not defined ( $state->{entries}{$filename}{modified_hash} ) + or $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} + or $meta->{filehash} eq 'added' ) + { + # normal update, just send the new revision (either U=Update, + # or A=Add, or R=Remove) + if ( defined($wrev) && $wrev < 0 ) + { + $log->info("Tell the client the file is scheduled for removal"); + print "MT text R \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + } + elsif ( (!defined($wrev) || $wrev == 0) && (!defined($meta->{revision}) || $meta->{revision} == 0) ) + { + $log->info("Tell the client the file is scheduled for addition"); + print "MT text A \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + + } + else { + $log->info("Updating '$filename' to ".$meta->{revision}); + print "MT +updated\n"; + print "MT text U \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + print "MT -updated\n"; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + if ( defined ( $wrev ) ) + { + # instruct client we're sending a file to put in this path as a replacement + print "Update-existing $dirpart\n"; + $log->debug("Updating existing file 'Update-existing $dirpart'"); + } else { + # instruct client we're sending a file to put in this path as a new file + print "Clear-static-directory $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$dirpart\n"; + print "Clear-sticky $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$dirpart\n"; + + $log->debug("Creating new file 'Created $dirpart'"); + print "Created $dirpart\n"; + } + print $state->{CVSROOT} . 
"/$state->{module}/$filename\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + + # transmit file + transmitfile($meta->{filehash}); + } + } else { + $log->info("Updating '$filename'"); + my ( $filepart, $dirpart ) = filenamesplit($meta->{name},1); + + my $mergeDir = setupTmpDir(); + + my $file_local = $filepart . ".mine"; + my $mergedFile = "$mergeDir/$file_local"; + system("ln","-s",$state->{entries}{$filename}{modified_filename}, $file_local); + my $file_old = $filepart . "." . $oldmeta->{revision}; + transmitfile($oldmeta->{filehash}, { targetfile => $file_old }); + my $file_new = $filepart . "." . $meta->{revision}; + transmitfile($meta->{filehash}, { targetfile => $file_new }); + + # we need to merge with the local changes ( M=successful merge, C=conflict merge ) + $log->info("Merging $file_local, $file_old, $file_new"); + print "M Merging differences between 1.$oldmeta->{revision} and 1.$meta->{revision} into $filename\n"; + + $log->debug("Temporary directory for merge is $mergeDir"); + + my $return = system("git", "merge-file", $file_local, $file_old, $file_new); + $return >>= 8; + + cleanupTmpDir(); + + if ( $return == 0 ) + { + $log->info("Merged successfully"); + print "M M $filename\n"; + $log->debug("Merged $dirpart"); + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + print "Merged $dirpart\n"; + $log->debug($state->{CVSROOT} . "/$state->{module}/$filename"); + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + my $kopts = kopts_from_path("$dirpart/$filepart", + "file",$mergedFile); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + } + } + elsif ( $return == 1 ) + { + $log->info("Merged with conflicts"); + print "E cvs update: conflicts found in $filename\n"; + print "M C $filename\n"; + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + print "Merged $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + my $kopts = kopts_from_path("$dirpart/$filepart", + "file",$mergedFile); + print "/$filepart/1.$meta->{revision}/+/$kopts/\n"; + } + } + else + { + $log->warn("Merge failed"); + next; + } + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + + # transmit file, format is single integer on a line by itself (file + # size) followed by the file contents + # TODO : we should copy files in blocks + my $data = `cat $mergedFile`; + $log->debug("File size : " . length($data)); + print length($data) . "\n"; + print $data; + } + } + + } + + print "ok\n"; +} + +sub req_ci +{ + my ( $cmd, $data ) = @_; + + argsplit("ci"); + + #$log->debug("State : " . Dumper($state)); + + $log->info("req_ci : " . ( defined($data) ? $data : "[NULL]" )); + + if ( $state->{method} eq 'pserver' and $state->{user} eq 'anonymous' ) + { + print "error 1 anonymous user cannot commit via pserver\n"; + cleanupWorkTree(); + exit; + } + + if ( -e $state->{CVSROOT} . 
"/index" ) + { + $log->warn("file 'index' already exists in the git repository"); + print "error 1 Index already exists in git repo\n"; + cleanupWorkTree(); + exit; + } + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # Remember where the head was at the beginning. + my $parenthash = `git show-ref -s refs/heads/$state->{module}`; + chomp $parenthash; + if ($parenthash !~ /^[0-9a-f]{40}$/) { + print "error 1 pserver cannot find the current HEAD of module"; + cleanupWorkTree(); + exit; + } + + setupWorkTree($parenthash); + + $log->info("Lockless commit start, basing commit on '$work->{workDir}', index file is '$work->{index}'"); + + $log->info("Created index '$work->{index}' for head $state->{module} - exit status $?"); + + my @committedfiles = (); + my %oldmeta; + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + my $committedfile = $filename; + $filename = filecleanup($filename); + + next unless ( exists $state->{entries}{$filename}{modified_filename} or not $state->{entries}{$filename}{unchanged} ); + + my $meta = $updater->getmeta($filename); + $oldmeta{$filename} = $meta; + + my $wrev = revparse($filename); + + my ( $filepart, $dirpart ) = filenamesplit($filename); + + # do a checkout of the file if it is part of this tree + if ($wrev) { + system('git', 'checkout-index', '-f', '-u', $filename); + unless ($? == 0) { + die "Error running git-checkout-index -f -u $filename : $!"; + } + } + + my $addflag = 0; + my $rmflag = 0; + $rmflag = 1 if ( defined($wrev) and $wrev < 0 ); + $addflag = 1 unless ( -e $filename ); + + # Do up to date checking + unless ( $addflag or $wrev == $meta->{revision} or ( $rmflag and -$wrev == $meta->{revision} ) ) + { + # fail everything if an up to date check fails + print "error 1 Up to date check failed for $filename\n"; + cleanupWorkTree(); + exit; + } + + push @committedfiles, $committedfile; + $log->info("Committing $filename"); + + system("mkdir","-p",$dirpart) unless ( -d $dirpart ); + + unless ( $rmflag ) + { + $log->debug("rename $state->{entries}{$filename}{modified_filename} $filename"); + rename $state->{entries}{$filename}{modified_filename},$filename; + + # Calculate modes to remove + my $invmode = ""; + foreach ( qw (r w x) ) { $invmode .= $_ unless ( $state->{entries}{$filename}{modified_mode} =~ /$_/ ); } + + $log->debug("chmod u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode . " $filename"); + system("chmod","u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode, $filename); + } + + if ( $rmflag ) + { + $log->info("Removing file '$filename'"); + unlink($filename); + system("git", "update-index", "--remove", $filename); + } + elsif ( $addflag ) + { + $log->info("Adding file '$filename'"); + system("git", "update-index", "--add", $filename); + } else { + $log->info("Updating file '$filename'"); + system("git", "update-index", $filename); + } + } + + unless ( scalar(@committedfiles) > 0 ) + { + print "E No files to commit\n"; + print "ok\n"; + cleanupWorkTree(); + return; + } + + my $treehash = `git write-tree`; + chomp $treehash; + + $log->debug("Treehash : $treehash, Parenthash : $parenthash"); + + # write our commit message out if we have one ... 
+ my ( $msg_fh, $msg_filename ) = tempfile( DIR => $TEMP_DIR ); + print $msg_fh $state->{opt}{m};# if ( exists ( $state->{opt}{m} ) ); + if ( defined ( $cfg->{gitcvs}{commitmsgannotation} ) ) { + if ($cfg->{gitcvs}{commitmsgannotation} !~ /^\s*$/ ) { + print $msg_fh "\n\n".$cfg->{gitcvs}{commitmsgannotation}."\n" + } + } else { + print $msg_fh "\n\nvia git-CVS emulator\n"; + } + close $msg_fh; + + my $commithash = `git commit-tree $treehash -p $parenthash < $msg_filename`; + chomp($commithash); + $log->info("Commit hash : $commithash"); + + unless ( $commithash =~ /[a-zA-Z0-9]{40}/ ) + { + $log->warn("Commit failed (Invalid commit hash)"); + print "error 1 Commit failed (unknown reason)\n"; + cleanupWorkTree(); + exit; + } + + ### Emulate git-receive-pack by running hooks/update + my @hook = ( $ENV{GIT_DIR}.'hooks/update', "refs/heads/$state->{module}", + $parenthash, $commithash ); + if( -x $hook[0] ) { + unless( system( @hook ) == 0 ) + { + $log->warn("Commit failed (update hook declined to update ref)"); + print "error 1 Commit failed (update hook declined)\n"; + cleanupWorkTree(); + exit; + } + } + + ### Update the ref + if (system(qw(git update-ref -m), "cvsserver ci", + "refs/heads/$state->{module}", $commithash, $parenthash)) { + $log->warn("update-ref for $state->{module} failed."); + print "error 1 Cannot commit -- update first\n"; + cleanupWorkTree(); + exit; + } + + ### Emulate git-receive-pack by running hooks/post-receive + my $hook = $ENV{GIT_DIR}.'hooks/post-receive'; + if( -x $hook ) { + open(my $pipe, "| $hook") || die "can't fork $!"; + + local $SIG{PIPE} = sub { die 'pipe broke' }; + + print $pipe "$parenthash $commithash refs/heads/$state->{module}\n"; + + close $pipe || die "bad pipe: $! $?"; + } + + $updater->update(); + + ### Then hooks/post-update + $hook = $ENV{GIT_DIR}.'hooks/post-update'; + if (-x $hook) { + system($hook, "refs/heads/$state->{module}"); + } + + # foreach file specified on the command line ... + foreach my $filename ( @committedfiles ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + unless (defined $meta->{revision}) { + $meta->{revision} = 1; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename, 1); + + $log->debug("Checked-in $dirpart : $filename"); + + print "M $state->{CVSROOT}/$state->{module}/$filename,v <-- $dirpart$filepart\n"; + if ( defined $meta->{filehash} && $meta->{filehash} eq "deleted" ) + { + print "M new revision: delete; previous revision: 1.$oldmeta{$filename}{revision}\n"; + print "Remove-entry $dirpart\n"; + print "$filename\n"; + } else { + if ($meta->{revision} == 1) { + print "M initial revision: 1.1\n"; + } else { + print "M new revision: 1.$meta->{revision}; previous revision: 1.$oldmeta{$filename}{revision}\n"; + } + print "Checked-in $dirpart\n"; + print "$filename\n"; + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + } + } + + cleanupWorkTree(); + print "ok\n"; +} + +sub req_status +{ + my ( $cmd, $data ) = @_; + + argsplit("status"); + + $log->info("req_status : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... 
+ foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + next if exists($state->{opt}{l}) && index($filename, '/', length($state->{prependdir})) >= 0; + + my $meta = $updater->getmeta($filename); + my $oldmeta = $meta; + + my $wrev = revparse($filename); + + # If the working copy is an old revision, lets get that version too for comparison. + if ( defined($wrev) and $wrev != $meta->{revision} ) + { + $oldmeta = $updater->getmeta($filename, $wrev); + } + + # TODO : All possible statuses aren't yet implemented + my $status; + # Files are up to date if the working copy and repo copy have the same revision, and the working copy is unmodified + $status = "Up-to-date" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision} + and + ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) ) + or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta->{filehash} ) ) + ); + + # Need checkout if the working copy has an older revision than the repo copy, and the working copy is unmodified + $status ||= "Needs Checkout" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev + and + ( $state->{entries}{$filename}{unchanged} + or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} ) ) + ); + + # Need checkout if it exists in the repo but doesn't have a working copy + $status ||= "Needs Checkout" if ( not defined ( $wrev ) and defined ( $meta->{revision} ) ); + + # Locally modified if working copy and repo copy have the same revision but there are local changes + $status ||= "Locally Modified" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision} and $state->{entries}{$filename}{modified_filename} ); + + # Needs Merge if working copy revision is less than repo copy and there are local changes + $status ||= "Needs Merge" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev and $state->{entries}{$filename}{modified_filename} ); + + $status ||= "Locally Added" if ( defined ( $state->{entries}{$filename}{revision} ) and not defined ( $meta->{revision} ) ); + $status ||= "Locally Removed" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and -$wrev == $meta->{revision} ); + $status ||= "Unresolved Conflict" if ( defined ( $state->{entries}{$filename}{conflict} ) and $state->{entries}{$filename}{conflict} =~ /^\+=/ ); + $status ||= "File had conflicts on merge" if ( 0 ); + + $status ||= "Unknown"; + + my ($filepart) = filenamesplit($filename); + + print "M ===================================================================\n"; + print "M File: $filepart\tStatus: $status\n"; + if ( defined($state->{entries}{$filename}{revision}) ) + { + print "M Working revision:\t" . $state->{entries}{$filename}{revision} . "\n"; + } else { + print "M Working revision:\tNo entry for $filename\n"; + } + if ( defined($meta->{revision}) ) + { + print "M Repository revision:\t1." . $meta->{revision} . 
"\t$state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M Sticky Tag:\t\t(none)\n"; + print "M Sticky Date:\t\t(none)\n"; + print "M Sticky Options:\t\t(none)\n"; + } else { + print "M Repository revision:\tNo revision control file\n"; + } + print "M\n"; + } + + print "ok\n"; +} + +sub req_diff +{ + my ( $cmd, $data ) = @_; + + argsplit("diff"); + + $log->debug("req_diff : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + my ($revision1, $revision2); + if ( defined ( $state->{opt}{r} ) and ref $state->{opt}{r} eq "ARRAY" ) + { + $revision1 = $state->{opt}{r}[0]; + $revision2 = $state->{opt}{r}[1]; + } else { + $revision1 = $state->{opt}{r}; + } + + $revision1 =~ s/^1\.// if ( defined ( $revision1 ) ); + $revision2 =~ s/^1\.// if ( defined ( $revision2 ) ); + + $log->debug("Diffing revisions " . ( defined($revision1) ? $revision1 : "[NULL]" ) . " and " . ( defined($revision2) ? $revision2 : "[NULL]" ) ); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my ( $fh, $file1, $file2, $meta1, $meta2, $filediff ); + + my $wrev = revparse($filename); + + # We need _something_ to diff against + next unless ( defined ( $wrev ) ); + + # if we have a -r switch, use it + if ( defined ( $revision1 ) ) + { + ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta1 = $updater->getmeta($filename, $revision1); + unless ( defined ( $meta1 ) and $meta1->{filehash} ne "deleted" ) + { + print "E File $filename at revision 1.$revision1 doesn't exist\n"; + next; + } + transmitfile($meta1->{filehash}, { targetfile => $file1 }); + } + # otherwise we just use the working copy revision + else + { + ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta1 = $updater->getmeta($filename, $wrev); + transmitfile($meta1->{filehash}, { targetfile => $file1 }); + } + + # if we have a second -r switch, use it too + if ( defined ( $revision2 ) ) + { + ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta2 = $updater->getmeta($filename, $revision2); + + unless ( defined ( $meta2 ) and $meta2->{filehash} ne "deleted" ) + { + print "E File $filename at revision 1.$revision2 doesn't exist\n"; + next; + } + + transmitfile($meta2->{filehash}, { targetfile => $file2 }); + } + # otherwise we just use the working copy + else + { + $file2 = $state->{entries}{$filename}{modified_filename}; + } + + # if we have been given -r, and we don't have a $file2 yet, lets get one + if ( defined ( $revision1 ) and not defined ( $file2 ) ) + { + ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta2 = $updater->getmeta($filename, $wrev); + transmitfile($meta2->{filehash}, { targetfile => $file2 }); + } + + # We need to have retrieved something useful + next unless ( defined ( $meta1 ) ); + + # Files to date if the working copy and repo copy have the same revision, and the working copy is unmodified + next if ( not defined ( $meta2 ) and $wrev == $meta1->{revision} + and + ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) ) + or ( 
defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta1->{filehash} ) ) + ); + + # Apparently we only show diffs for locally modified files + next unless ( defined($meta2) or defined ( $state->{entries}{$filename}{modified_filename} ) ); + + print "M Index: $filename\n"; + print "M ===================================================================\n"; + print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M retrieving revision 1.$meta1->{revision}\n" if ( defined ( $meta1 ) ); + print "M retrieving revision 1.$meta2->{revision}\n" if ( defined ( $meta2 ) ); + print "M diff "; + foreach my $opt ( keys %{$state->{opt}} ) + { + if ( ref $state->{opt}{$opt} eq "ARRAY" ) + { + foreach my $value ( @{$state->{opt}{$opt}} ) + { + print "-$opt $value "; + } + } else { + print "-$opt "; + print "$state->{opt}{$opt} " if ( defined ( $state->{opt}{$opt} ) ); + } + } + print "$filename\n"; + + $log->info("Diffing $filename -r $meta1->{revision} -r " . ( $meta2->{revision} or "workingcopy" )); + + ( $fh, $filediff ) = tempfile ( DIR => $TEMP_DIR ); + + if ( exists $state->{opt}{u} ) + { + system("diff -u -L '$filename revision 1.$meta1->{revision}' -L '$filename " . ( defined($meta2->{revision}) ? "revision 1.$meta2->{revision}" : "working copy" ) . "' $file1 $file2 > $filediff"); + } else { + system("diff $file1 $file2 > $filediff"); + } + + while ( <$fh> ) + { + print "M $_"; + } + close $fh; + } + + print "ok\n"; +} + +sub req_log +{ + my ( $cmd, $data ) = @_; + + argsplit("log"); + + $log->debug("req_log : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("log state : " . Dumper($state)); + + my ( $minrev, $maxrev ); + if ( defined ( $state->{opt}{r} ) and $state->{opt}{r} =~ /([\d.]+)?(::?)([\d.]+)?/ ) + { + my $control = $2; + $minrev = $1; + $maxrev = $3; + $minrev =~ s/^1\.// if ( defined ( $minrev ) ); + $maxrev =~ s/^1\.// if ( defined ( $maxrev ) ); + $minrev++ if ( defined($minrev) and $control eq "::" ); + } + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $headmeta = $updater->getmeta($filename); + + my $revisions = $updater->getlog($filename); + my $totalrevisions = scalar(@$revisions); + + if ( defined ( $minrev ) ) + { + $log->debug("Removing revisions less than $minrev"); + while ( scalar(@$revisions) > 0 and $revisions->[-1]{revision} < $minrev ) + { + pop @$revisions; + } + } + if ( defined ( $maxrev ) ) + { + $log->debug("Removing revisions greater than $maxrev"); + while ( scalar(@$revisions) > 0 and $revisions->[0]{revision} > $maxrev ) + { + shift @$revisions; + } + } + + next unless ( scalar(@$revisions) ); + + print "M \n"; + print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M Working file: $filename\n"; + print "M head: 1.$headmeta->{revision}\n"; + print "M branch:\n"; + print "M locks: strict\n"; + print "M access list:\n"; + print "M symbolic names:\n"; + print "M keyword substitution: kv\n"; + print "M total revisions: $totalrevisions;\tselected revisions: " . scalar(@$revisions) . 
"\n"; + print "M description:\n"; + + foreach my $revision ( @$revisions ) + { + print "M ----------------------------\n"; + print "M revision 1.$revision->{revision}\n"; + # reformat the date for log output + $revision->{modified} = sprintf('%04d/%02d/%02d %s', $3, $DATE_LIST->{$2}, $1, $4 ) if ( $revision->{modified} =~ /(\d+)\s+(\w+)\s+(\d+)\s+(\S+)/ and defined($DATE_LIST->{$2}) ); + $revision->{author} = cvs_author($revision->{author}); + print "M date: $revision->{modified}; author: $revision->{author}; state: " . ( $revision->{filehash} eq "deleted" ? "dead" : "Exp" ) . "; lines: +2 -3\n"; + my $commitmessage = $updater->commitmessage($revision->{commithash}); + $commitmessage =~ s/^/M /mg; + print $commitmessage . "\n"; + } + print "M =============================================================================\n"; + } + + print "ok\n"; +} + +sub req_annotate +{ + my ( $cmd, $data ) = @_; + + argsplit("annotate"); + + $log->info("req_annotate : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing annotate on ... + argsfromdir($updater); + + # we'll need a temporary checkout dir + setupWorkTree(); + + $log->info("Temp checkoutdir creation successful, basing annotate session work on '$work->{workDir}', index file is '$ENV{GIT_INDEX_FILE}'"); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + + next unless ( $meta->{revision} ); + + # get all the commits that this file was in + # in dense format -- aka skip dead revisions + my $revisions = $updater->gethistorydense($filename); + my $lastseenin = $revisions->[0][2]; + + # populate the temporary index based on the latest commit were we saw + # the file -- but do it cheaply without checking out any files + # TODO: if we got a revision from the client, use that instead + # to look up the commithash in sqlite (still good to default to + # the current head as we do now) + system("git", "read-tree", $lastseenin); + unless ($? == 0) + { + print "E error running git-read-tree $lastseenin $ENV{GIT_INDEX_FILE} $!\n"; + return; + } + $log->info("Created index '$ENV{GIT_INDEX_FILE}' with commit $lastseenin - exit status $?"); + + # do a checkout of the file + system('git', 'checkout-index', '-f', '-u', $filename); + unless ($? == 0) { + print "E error running git-checkout-index -f -u $filename : $!\n"; + return; + } + + $log->info("Annotate $filename"); + + # Prepare a file with the commits from the linearized + # history that annotate should know about. This prevents + # git-jsannotate telling us about commits we are hiding + # from the client. + + my $a_hints = "$work->{workDir}/.annotate_hints"; + if (!open(ANNOTATEHINTS, '>', $a_hints)) { + print "E failed to open '$a_hints' for writing: $!\n"; + return; + } + for (my $i=0; $i < @$revisions; $i++) + { + print ANNOTATEHINTS $revisions->[$i][2]; + if ($i+1 < @$revisions) { # have we got a parent? + print ANNOTATEHINTS ' ' . 
$revisions->[$i+1][2]; + } + print ANNOTATEHINTS "\n"; + } + + print ANNOTATEHINTS "\n"; + close ANNOTATEHINTS + or (print "E failed to write $a_hints: $!\n"), return; + + my @cmd = (qw(git annotate -l -S), $a_hints, $filename); + if (!open(ANNOTATE, "-|", @cmd)) { + print "E error invoking ". join(' ',@cmd) .": $!\n"; + return; + } + my $metadata = {}; + print "E Annotations for $filename\n"; + print "E ***************\n"; + while ( <ANNOTATE> ) + { + if (m/^([a-zA-Z0-9]{40})\t\([^\)]*\)(.*)$/i) + { + my $commithash = $1; + my $data = $2; + unless ( defined ( $metadata->{$commithash} ) ) + { + $metadata->{$commithash} = $updater->getmeta($filename, $commithash); + $metadata->{$commithash}{author} = cvs_author($metadata->{$commithash}{author}); + $metadata->{$commithash}{modified} = sprintf("%02d-%s-%02d", $1, $2, $3) if ( $metadata->{$commithash}{modified} =~ /^(\d+)\s(\w+)\s\d\d(\d\d)/ ); + } + printf("M 1.%-5d (%-8s %10s): %s\n", + $metadata->{$commithash}{revision}, + $metadata->{$commithash}{author}, + $metadata->{$commithash}{modified}, + $data + ); + } else { + $log->warn("Error in annotate output! LINE: $_"); + print "E Annotate error \n"; + next; + } + } + close ANNOTATE; + } + + # done; get out of the tempdir + cleanupWorkTree(); + + print "ok\n"; + +} + +# This method takes the state->{arguments} array and produces two new arrays. +# The first is $state->{args} which is everything before the '--' argument, and +# the second is $state->{files} which is everything after it. +sub argsplit +{ + $state->{args} = []; + $state->{files} = []; + $state->{opt} = {}; + + return unless( defined($state->{arguments}) and ref $state->{arguments} eq "ARRAY" ); + + my $type = shift; + + if ( defined($type) ) + { + my $opt = {}; + $opt = { A => 0, N => 0, P => 0, R => 0, c => 0, f => 0, l => 0, n => 0, p => 0, s => 0, r => 1, D => 1, d => 1, k => 1, j => 1, } if ( $type eq "co" ); + $opt = { v => 0, l => 0, R => 0 } if ( $type eq "status" ); + $opt = { A => 0, P => 0, C => 0, d => 0, f => 0, l => 0, R => 0, p => 0, k => 1, r => 1, D => 1, j => 1, I => 1, W => 1 } if ( $type eq "update" ); + $opt = { l => 0, R => 0, k => 1, D => 1, D => 1, r => 2 } if ( $type eq "diff" ); + $opt = { c => 0, R => 0, l => 0, f => 0, F => 1, m => 1, r => 1 } if ( $type eq "ci" ); + $opt = { k => 1, m => 1 } if ( $type eq "add" ); + $opt = { f => 0, l => 0, R => 0 } if ( $type eq "remove" ); + $opt = { l => 0, b => 0, h => 0, R => 0, t => 0, N => 0, S => 0, r => 1, d => 1, s => 1, w => 1 } if ( $type eq "log" ); + + + while ( scalar ( @{$state->{arguments}} ) > 0 ) + { + my $arg = shift @{$state->{arguments}}; + + next if ( $arg eq "--" ); + next unless ( $arg =~ /\S/ ); + + # if the argument looks like a switch + if ( $arg =~ /^-(\w)(.*)/ ) + { + # if it's a switch that takes an argument + if ( $opt->{$1} ) + { + # If this switch has already been provided + if ( $opt->{$1} > 1 and exists ( $state->{opt}{$1} ) ) + { + $state->{opt}{$1} = [ $state->{opt}{$1} ]; + if ( length($2) > 0 ) + { + push @{$state->{opt}{$1}},$2; + } else { + push @{$state->{opt}{$1}}, shift @{$state->{arguments}}; + } + } else { + # if there's extra data in the arg, use that as the argument for the switch + if ( length($2) > 0 ) + { + $state->{opt}{$1} = $2; + } else { + $state->{opt}{$1} = shift @{$state->{arguments}}; + } + } + } else { + $state->{opt}{$1} = undef; + } + } + else + { + push @{$state->{args}}, $arg; + } + } + } + else + { + my $mode = 0; + + foreach my $value ( @{$state->{arguments}} ) + { + if ( $value eq "--" ) + { + $mode++; + next; + 
} + push @{$state->{args}}, $value if ( $mode == 0 ); + push @{$state->{files}}, $value if ( $mode == 1 ); + } + } +} + +# This method uses $state->{directory} to populate $state->{args} with a list of filenames +sub argsfromdir +{ + my $updater = shift; + + $state->{args} = [] if ( scalar(@{$state->{args}}) == 1 and $state->{args}[0] eq "." ); + + return if ( scalar ( @{$state->{args}} ) > 1 ); + + my @gethead = @{$updater->gethead}; + + # push added files + foreach my $file (keys %{$state->{entries}}) { + if ( exists $state->{entries}{$file}{revision} && + $state->{entries}{$file}{revision} == 0 ) + { + push @gethead, { name => $file, filehash => 'added' }; + } + } + + if ( scalar(@{$state->{args}}) == 1 ) + { + my $arg = $state->{args}[0]; + $arg .= $state->{prependdir} if ( defined ( $state->{prependdir} ) ); + + $log->info("Only one arg specified, checking for directory expansion on '$arg'"); + + foreach my $file ( @gethead ) + { + next if ( $file->{filehash} eq "deleted" and not defined ( $state->{entries}{$file->{name}} ) ); + next unless ( $file->{name} =~ /^$arg\// or $file->{name} eq $arg ); + push @{$state->{args}}, $file->{name}; + } + + shift @{$state->{args}} if ( scalar(@{$state->{args}}) > 1 ); + } else { + $log->info("Only one arg specified, populating file list automatically"); + + $state->{args} = []; + + foreach my $file ( @gethead ) + { + next if ( $file->{filehash} eq "deleted" and not defined ( $state->{entries}{$file->{name}} ) ); + next unless ( $file->{name} =~ s/^$state->{prependdir}// ); + push @{$state->{args}}, $file->{name}; + } + } +} + +# This method cleans up the $state variable after a command that uses arguments has run +sub statecleanup +{ + $state->{files} = []; + $state->{args} = []; + $state->{arguments} = []; + $state->{entries} = {}; +} + +sub revparse +{ + my $filename = shift; + + return undef unless ( defined ( $state->{entries}{$filename}{revision} ) ); + + return $1 if ( $state->{entries}{$filename}{revision} =~ /^1\.(\d+)/ ); + return -$1 if ( $state->{entries}{$filename}{revision} =~ /^-1\.(\d+)/ ); + + return undef; +} + +# This method takes a file hash and does a CVS "file transfer". Its +# exact behaviour depends on a second, optional hash table argument: +# - If $options->{targetfile}, dump the contents to that file; +# - If $options->{print}, use M/MT to transmit the contents one line +# at a time; +# - Otherwise, transmit the size of the file, followed by the file +# contents. 
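# A minimal standalone sketch (not part of git-cvsserver; the helper name
# send_blob_lines is made up for illustration) of the M/MT framing that the
# 'print' mode described above uses: complete lines are sent as "M <line>",
# while a final line with no trailing newline goes out as "MT text <line>"
# followed by a newline.
sub send_blob_lines
{
    my @lines = @_;
    foreach my $line ( @lines )
    {
        if ( $line =~ /\n\z/ )
        {
            print 'M ', $line;              # line already ends in "\n"
        } else {
            print 'MT text ', $line, "\n";  # unterminated last line of the blob
        }
    }
}
# e.g. send_blob_lines("first\n", "second\n", "no trailing newline");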
+sub transmitfile +{ + my $filehash = shift; + my $options = shift; + + if ( defined ( $filehash ) and $filehash eq "deleted" ) + { + $log->warn("filehash is 'deleted'"); + return; + } + + die "Need filehash" unless ( defined ( $filehash ) and $filehash =~ /^[a-zA-Z0-9]{40}$/ ); + + my $type = `git cat-file -t $filehash`; + chomp $type; + + die ( "Invalid type '$type' (expected 'blob')" ) unless ( defined ( $type ) and $type eq "blob" ); + + my $size = `git cat-file -s $filehash`; + chomp $size; + + $log->debug("transmitfile($filehash) size=$size, type=$type"); + + if ( open my $fh, '-|', "git", "cat-file", "blob", $filehash ) + { + if ( defined ( $options->{targetfile} ) ) + { + my $targetfile = $options->{targetfile}; + open NEWFILE, ">", $targetfile or die("Couldn't open '$targetfile' for writing : $!"); + print NEWFILE $_ while ( <$fh> ); + close NEWFILE or die("Failed to write '$targetfile': $!"); + } elsif ( defined ( $options->{print} ) && $options->{print} ) { + while ( <$fh> ) { + if( /\n\z/ ) { + print 'M ', $_; + } else { + print 'MT text ', $_, "\n"; + } + } + } else { + print "$size\n"; + print while ( <$fh> ); + } + close $fh or die ("Couldn't close filehandle for transmitfile(): $!"); + } else { + die("Couldn't execute git-cat-file"); + } +} + +# This method takes a file name, and returns ( $dirpart, $filepart ) which +# refers to the directory portion and the file portion of the filename +# respectively +sub filenamesplit +{ + my $filename = shift; + my $fixforlocaldir = shift; + + my ( $filepart, $dirpart ) = ( $filename, "." ); + ( $filepart, $dirpart ) = ( $2, $1 ) if ( $filename =~ /(.*)\/(.*)/ ); + $dirpart .= "/"; + + if ( $fixforlocaldir ) + { + $dirpart =~ s/^$state->{prependdir}//; + } + + return ( $filepart, $dirpart ); +} + +sub filecleanup +{ + my $filename = shift; + + return undef unless(defined($filename)); + if ( $filename =~ /^\// ) + { + print "E absolute filenames '$filename' not supported by server\n"; + return undef; + } + + $filename =~ s/^\.\///g; + $filename = $state->{prependdir} . $filename; + return $filename; +} + +sub validateGitDir +{ + if( !defined($state->{CVSROOT}) ) + { + print "error 1 CVSROOT not specified\n"; + cleanupWorkTree(); + exit; + } + if( $ENV{GIT_DIR} ne ($state->{CVSROOT} . '/') ) + { + print "error 1 Internally inconsistent CVSROOT\n"; + cleanupWorkTree(); + exit; + } +} + +# Setup working directory in a work tree with the requested version +# loaded in the index. +sub setupWorkTree +{ + my ($ver) = @_; + + validateGitDir(); + + if( ( defined($work->{state}) && $work->{state} != 1 ) || + defined($work->{tmpDir}) ) + { + $log->warn("Bad work tree state management"); + print "error 1 Internal setup multiple work trees without cleanup\n"; + cleanupWorkTree(); + exit; + } + + $work->{workDir} = tempdir ( DIR => $TEMP_DIR ); + + if( !defined($work->{index}) ) + { + (undef, $work->{index}) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 ); + } + + chdir $work->{workDir} or + die "Unable to chdir to $work->{workDir}\n"; + + $log->info("Setting up GIT_WORK_TREE as '.' in '$work->{workDir}', index file is '$work->{index}'"); + + $ENV{GIT_WORK_TREE} = "."; + $ENV{GIT_INDEX_FILE} = $work->{index}; + $work->{state} = 2; + + if($ver) + { + system("git","read-tree",$ver); + unless ($? 
== 0) + { + $log->warn("Error running git-read-tree"); + die "Error running git-read-tree $ver in $work->{workDir} $!\n"; + } + } + # else # req_annotate reads tree for each file +} + +# Ensure current directory is in some kind of working directory, +# with a recent version loaded in the index. +sub ensureWorkTree +{ + if( defined($work->{tmpDir}) ) + { + $log->warn("Bad work tree state management [ensureWorkTree()]"); + print "error 1 Internal setup multiple dirs without cleanup\n"; + cleanupWorkTree(); + exit; + } + if( $work->{state} ) + { + return; + } + + validateGitDir(); + + if( !defined($work->{emptyDir}) ) + { + $work->{emptyDir} = tempdir ( DIR => $TEMP_DIR, OPEN => 0); + } + chdir $work->{emptyDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + + my $ver = `git show-ref -s refs/heads/$state->{module}`; + chomp $ver; + if ($ver !~ /^[0-9a-f]{40}$/) + { + $log->warn("Error from git show-ref -s refs/head$state->{module}"); + print "error 1 cannot find the current HEAD of module"; + cleanupWorkTree(); + exit; + } + + if( !defined($work->{index}) ) + { + (undef, $work->{index}) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 ); + } + + $ENV{GIT_WORK_TREE} = "."; + $ENV{GIT_INDEX_FILE} = $work->{index}; + $work->{state} = 1; + + system("git","read-tree",$ver); + unless ($? == 0) + { + die "Error running git-read-tree $ver $!\n"; + } +} + +# Cleanup working directory that is not needed any longer. +sub cleanupWorkTree +{ + if( ! $work->{state} ) + { + return; + } + + chdir "/" or die "Unable to chdir '/'\n"; + + if( defined($work->{workDir}) ) + { + rmtree( $work->{workDir} ); + undef $work->{workDir}; + } + undef $work->{state}; +} + +# Setup a temporary directory (not a working tree), typically for +# merging dirty state as in req_update. +sub setupTmpDir +{ + $work->{tmpDir} = tempdir ( DIR => $TEMP_DIR ); + chdir $work->{tmpDir} or die "Unable to chdir $work->{tmpDir}\n"; + + return $work->{tmpDir}; +} + +# Clean up a previously setupTmpDir. Restore previous work tree if +# appropriate. +sub cleanupTmpDir +{ + if ( !defined($work->{tmpDir}) ) + { + $log->warn("cleanup tmpdir that has not been setup"); + die "Cleanup tmpDir that has not been setup\n"; + } + if( defined($work->{state}) ) + { + if( $work->{state} == 1 ) + { + chdir $work->{emptyDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + } + elsif( $work->{state} == 2 ) + { + chdir $work->{workDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + } + else + { + $log->warn("Inconsistent work dir state"); + die "Inconsistent work dir state\n"; + } + } + else + { + chdir "/" or die "Unable to chdir '/'\n"; + } +} + +# Given a path, this function returns a string containing the kopts +# that should go into that path's Entries line. For example, a binary +# file should get -kb. 
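# A condensed sketch of the decision order implemented by kopts_from_path()
# below. It is illustrative only: kopts_sketch is not a real helper, it assumes
# the attribute lookups and the content check have already been done by the
# caller, and it leaves out the handling of a -k option supplied by the client
# for the "sha1Or-k" source type.
sub kopts_sketch
{
    my %in = @_;
    my $attr = defined($in{attr}) ? $in{attr} : 'unspecified';

    if ( $in{usecrlfattr} )
    {
        return "-kb" if ( $attr eq "unset" );                                   # marked binary
        return ""    if ( $in{eol_attr_set} or $attr eq "set" or $attr eq "input" );  # marked text
    }
    if ( defined($in{allbinary}) )
    {
        return "-kb" if ( $in{allbinary} =~ /^\s*(1|true|yes)\s*$/i );          # everything binary
        if ( $in{allbinary} =~ /^\s*guess\s*$/i )
        {
            return $in{looks_binary} ? "-kb" : "";                              # content heuristic
        }
    }
    return "";   # default: no special treatment for this path
}
# e.g. kopts_sketch(allbinary => "guess", looks_binary => 1) returns "-kb"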
+sub kopts_from_path +{ + my ($path, $srcType, $name) = @_; + + if ( defined ( $cfg->{gitcvs}{usecrlfattr} ) and + $cfg->{gitcvs}{usecrlfattr} =~ /\s*(1|true|yes)\s*$/i ) + { + my ($val) = check_attr( "text", $path ); + if ( $val eq "unspecified" ) + { + $val = check_attr( "crlf", $path ); + } + if ( $val eq "unset" ) + { + return "-kb" + } + elsif ( check_attr( "eol", $path ) ne "unspecified" || + $val eq "set" || $val eq "input" ) + { + return ""; + } + else + { + $log->info("Unrecognized check_attr crlf $path : $val"); + } + } + + if ( defined ( $cfg->{gitcvs}{allbinary} ) ) + { + if( ($cfg->{gitcvs}{allbinary} =~ /^\s*(1|true|yes)\s*$/i) ) + { + return "-kb"; + } + elsif( ($cfg->{gitcvs}{allbinary} =~ /^\s*guess\s*$/i) ) + { + if( $srcType eq "sha1Or-k" && + !defined($name) ) + { + my ($ret)=$state->{entries}{$path}{options}; + if( !defined($ret) ) + { + $ret=$state->{opt}{k}; + if(defined($ret)) + { + $ret="-k$ret"; + } + else + { + $ret=""; + } + } + if( ! ($ret=~/^(|-kb|-kkv|-kkvl|-kk|-ko|-kv)$/) ) + { + print "E Bad -k option\n"; + $log->warn("Bad -k option: $ret"); + die "Error: Bad -k option: $ret\n"; + } + + return $ret; + } + else + { + if( is_binary($srcType,$name) ) + { + $log->debug("... as binary"); + return "-kb"; + } + else + { + $log->debug("... as text"); + } + } + } + } + # Return "" to give no special treatment to any path + return ""; +} + +sub check_attr +{ + my ($attr,$path) = @_; + ensureWorkTree(); + if ( open my $fh, '-|', "git", "check-attr", $attr, "--", $path ) + { + my $val = <$fh>; + close $fh; + $val =~ s/.*: ([^:\r\n]*)\s*$/$1/; + return $val; + } + else + { + return undef; + } +} + +# This should have the same heuristics as convert.c:is_binary() and related. +# Note that the bare CR test is done by callers in convert.c. +sub is_binary +{ + my ($srcType,$name) = @_; + $log->debug("is_binary($srcType,$name)"); + + # Minimize amount of interpreted code run in the inner per-character + # loop for large files, by totalling each character value and + # then analyzing the totals. + my @counts; + my $i; + for($i=0;$i<256;$i++) + { + $counts[$i]=0; + } + + my $fh = open_blob_or_die($srcType,$name); + my $line; + while( defined($line=<$fh>) ) + { + # Any '\0' and bare CR are considered binary. + if( $line =~ /\0|(\r[^\n])/ ) + { + close($fh); + return 1; + } + + # Count up each character in the line: + my $len=length($line); + for($i=0;$i<$len;$i++) + { + $counts[ord(substr($line,$i,1))]++; + } + } + close $fh; + + # Don't count CR and LF as either printable/nonprintable + $counts[ord("\n")]=0; + $counts[ord("\r")]=0; + + # Categorize individual character count into printable and nonprintable: + my $printable=0; + my $nonprintable=0; + for($i=0;$i<256;$i++) + { + if( $i < 32 && + $i != ord("\b") && + $i != ord("\t") && + $i != 033 && # ESC + $i != 014 ) # FF + { + $nonprintable+=$counts[$i]; + } + elsif( $i==127 ) # DEL + { + $nonprintable+=$counts[$i]; + } + else + { + $printable+=$counts[$i]; + } + } + + return ($printable >> 7) < $nonprintable; +} + +# Returns open file handle. 
Possible invocations: +# - open_blob_or_die("file",$filename); +# - open_blob_or_die("sha1",$filehash); +sub open_blob_or_die +{ + my ($srcType,$name) = @_; + my ($fh); + if( $srcType eq "file" ) + { + if( !open $fh,"<",$name ) + { + $log->warn("Unable to open file $name: $!"); + die "Unable to open file $name: $!\n"; + } + } + elsif( $srcType eq "sha1" || $srcType eq "sha1Or-k" ) + { + unless ( defined ( $name ) and $name =~ /^[a-zA-Z0-9]{40}$/ ) + { + $log->warn("Need filehash"); + die "Need filehash\n"; + } + + my $type = `git cat-file -t $name`; + chomp $type; + + unless ( defined ( $type ) and $type eq "blob" ) + { + $log->warn("Invalid type '$type' for '$name'"); + die ( "Invalid type '$type' (expected 'blob')" ) + } + + my $size = `git cat-file -s $name`; + chomp $size; + + $log->debug("open_blob_or_die($name) size=$size, type=$type"); + + unless( open $fh, '-|', "git", "cat-file", "blob", $name ) + { + $log->warn("Unable to open sha1 $name"); + die "Unable to open sha1 $name\n"; + } + } + else + { + $log->warn("Unknown type of blob source: $srcType"); + die "Unknown type of blob source: $srcType\n"; + } + return $fh; +} + +# Generate a CVS author name from Git author information, by taking the local +# part of the email address and replacing characters not in the Portable +# Filename Character Set (see IEEE Std 1003.1-2001, 3.276) by underscores. CVS +# Login names are Unix login names, which should be restricted to this +# character set. +sub cvs_author +{ + my $author_line = shift; + (my $author) = $author_line =~ /<([^@>]*)/; + + $author =~ s/[^-a-zA-Z0-9_.]/_/g; + $author =~ s/^-/_/; + + $author; +} + + +sub descramble +{ + # This table is from src/scramble.c in the CVS source + my @SHIFTS = ( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 114,120, 53, 79, 96,109, 72,108, 70, 64, 76, 67,116, 74, 68, 87, + 111, 52, 75,119, 49, 34, 82, 81, 95, 65,112, 86,118,110,122,105, + 41, 57, 83, 43, 46,102, 40, 89, 38,103, 45, 50, 42,123, 91, 35, + 125, 55, 54, 66,124,126, 59, 47, 92, 71,115, 78, 88,107,106, 56, + 36,121,117,104,101,100, 69, 73, 99, 63, 94, 93, 39, 37, 61, 48, + 58,113, 32, 90, 44, 98, 60, 51, 33, 97, 62, 77, 84, 80, 85,223, + 225,216,187,166,229,189,222,188,141,249,148,200,184,136,248,190, + 199,170,181,204,138,232,218,183,255,234,220,247,213,203,226,193, + 174,172,228,252,217,201,131,230,197,211,145,238,161,179,160,212, + 207,221,254,173,202,146,224,151,140,196,205,130,135,133,143,246, + 192,159,244,239,185,168,215,144,139,165,180,157,147,186,214,176, + 227,231,219,169,175,156,206,198,129,164,150,210,154,177,134,127, + 182,128,158,208,162,132,167,209,149,241,153,251,237,236,171,195, + 243,233,253,240,194,250,191,155,142,137,245,235,163,242,178,152 + ); + my ($str) = @_; + + # This should never happen, the same password format (A) has been + # used by CVS since the beginning of time + { + my $fmt = substr($str, 0, 1); + die "invalid password format `$fmt'" unless $fmt eq 'A'; + } + + my @str = unpack "C*", substr($str, 1); + my $ret = join '', map { chr $SHIFTS[$_] } @str; + return $ret; +} + + +package GITCVS::log; + +#### +#### Copyright The Open University UK - 2006. 
+#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### + +use strict; +use warnings; + +=head1 NAME + +GITCVS::log + +=head1 DESCRIPTION + +This module provides very crude logging with a similar interface to +Log::Log4perl + +=head1 METHODS + +=cut + +=head2 new + +Creates a new log object, optionally you can specify a filename here to +indicate the file to log to. If no log file is specified, you can specify one +later with method setfile, or indicate you no longer want logging with method +nofile. + +Until one of these methods is called, all log calls will buffer messages ready +to write out. + +=cut +sub new +{ + my $class = shift; + my $filename = shift; + + my $self = {}; + + bless $self, $class; + + if ( defined ( $filename ) ) + { + open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!"); + } + + return $self; +} + +=head2 setfile + +This methods takes a filename, and attempts to open that file as the log file. +If successful, all buffered data is written out to the file, and any further +logging is written directly to the file. + +=cut +sub setfile +{ + my $self = shift; + my $filename = shift; + + if ( defined ( $filename ) ) + { + open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!"); + } + + return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" ); + + while ( my $line = shift @{$self->{buffer}} ) + { + print {$self->{fh}} $line; + } +} + +=head2 nofile + +This method indicates no logging is going to be used. It flushes any entries in +the internal buffer, and sets a flag to ensure no further data is put there. + +=cut +sub nofile +{ + my $self = shift; + + $self->{nolog} = 1; + + return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" ); + + $self->{buffer} = []; +} + +=head2 _logopen + +Internal method. Returns true if the log file is open, false otherwise. + +=cut +sub _logopen +{ + my $self = shift; + + return 1 if ( defined ( $self->{fh} ) and ref $self->{fh} eq "GLOB" ); + return 0; +} + +=head2 debug info warn fatal + +These four methods are wrappers to _log. They provide the actual interface for +logging data. + +=cut +sub debug { my $self = shift; $self->_log("debug", @_); } +sub info { my $self = shift; $self->_log("info" , @_); } +sub warn { my $self = shift; $self->_log("warn" , @_); } +sub fatal { my $self = shift; $self->_log("fatal", @_); } + +=head2 _log + +This is an internal method called by the logging functions. It generates a +timestamp and pushes the logged line either to file, or internal buffer. + +=cut +sub _log +{ + my $self = shift; + my $level = shift; + + return if ( $self->{nolog} ); + + my @time = localtime; + my $timestring = sprintf("%4d-%02d-%02d %02d:%02d:%02d : %-5s", + $time[5] + 1900, + $time[4] + 1, + $time[3], + $time[2], + $time[1], + $time[0], + uc $level, + ); + + if ( $self->_logopen ) + { + print {$self->{fh}} $timestring . " - " . join(" ",@_) . "\n"; + } else { + push @{$self->{buffer}}, $timestring . " - " . join(" ",@_) . "\n"; + } +} + +=head2 DESTROY + +This method simply closes the file handle if one is open + +=cut +sub DESTROY +{ + my $self = shift; + + if ( $self->_logopen ) + { + close $self->{fh}; + } +} + +package GITCVS::updater; + +#### +#### Copyright The Open University UK - 2006. 
+#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### + +use strict; +use warnings; +use DBI; + +=head1 METHODS + +=cut + +=head2 new + +=cut +sub new +{ + my $class = shift; + my $config = shift; + my $module = shift; + my $log = shift; + + die "Need to specify a git repository" unless ( defined($config) and -d $config ); + die "Need to specify a module" unless ( defined($module) ); + + $class = ref($class) || $class; + + my $self = {}; + + bless $self, $class; + + $self->{valid_tables} = {'revision' => 1, + 'revision_ix1' => 1, + 'revision_ix2' => 1, + 'head' => 1, + 'head_ix1' => 1, + 'properties' => 1, + 'commitmsgs' => 1}; + + $self->{module} = $module; + $self->{git_path} = $config . "/"; + + $self->{log} = $log; + + die "Git repo '$self->{git_path}' doesn't exist" unless ( -d $self->{git_path} ); + + $self->{dbdriver} = $cfg->{gitcvs}{$state->{method}}{dbdriver} || + $cfg->{gitcvs}{dbdriver} || "SQLite"; + $self->{dbname} = $cfg->{gitcvs}{$state->{method}}{dbname} || + $cfg->{gitcvs}{dbname} || "%Ggitcvs.%m.sqlite"; + $self->{dbuser} = $cfg->{gitcvs}{$state->{method}}{dbuser} || + $cfg->{gitcvs}{dbuser} || ""; + $self->{dbpass} = $cfg->{gitcvs}{$state->{method}}{dbpass} || + $cfg->{gitcvs}{dbpass} || ""; + $self->{dbtablenameprefix} = $cfg->{gitcvs}{$state->{method}}{dbtablenameprefix} || + $cfg->{gitcvs}{dbtablenameprefix} || ""; + my %mapping = ( m => $module, + a => $state->{method}, + u => getlogin || getpwuid($<) || $<, + G => $self->{git_path}, + g => mangle_dirname($self->{git_path}), + ); + $self->{dbname} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbuser} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbtablenameprefix} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbtablenameprefix} = mangle_tablename($self->{dbtablenameprefix}); + + die "Invalid char ':' in dbdriver" if $self->{dbdriver} =~ /:/; + die "Invalid char ';' in dbname" if $self->{dbname} =~ /;/; + $self->{dbh} = DBI->connect("dbi:$self->{dbdriver}:dbname=$self->{dbname}", + $self->{dbuser}, + $self->{dbpass}); + die "Error connecting to database\n" unless defined $self->{dbh}; + + $self->{tables} = {}; + foreach my $table ( keys %{$self->{dbh}->table_info(undef,undef,undef,'TABLE')->fetchall_hashref('TABLE_NAME')} ) + { + $self->{tables}{$table} = 1; + } + + # Construct the revision table if required + unless ( $self->{tables}{$self->tablename("revision")} ) + { + my $tablename = $self->tablename("revision"); + my $ix1name = $self->tablename("revision_ix1"); + my $ix2name = $self->tablename("revision_ix2"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + name TEXT NOT NULL, + revision INTEGER NOT NULL, + filehash TEXT NOT NULL, + commithash TEXT NOT NULL, + author TEXT NOT NULL, + modified TEXT NOT NULL, + mode TEXT NOT NULL + ) + "); + $self->{dbh}->do(" + CREATE INDEX $ix1name + ON $tablename (name,revision) + "); + $self->{dbh}->do(" + CREATE INDEX $ix2name + ON $tablename (name,commithash) + "); + } + + # Construct the head table if required + unless ( $self->{tables}{$self->tablename("head")} ) + { + my $tablename = $self->tablename("head"); + my $ix1name = $self->tablename("head_ix1"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + name TEXT NOT NULL, + revision INTEGER NOT NULL, + filehash TEXT NOT NULL, + commithash TEXT NOT NULL, + author TEXT NOT NULL, + modified TEXT NOT NULL, + mode TEXT NOT NULL + ) + "); + $self->{dbh}->do(" + CREATE INDEX $ix1name + ON $tablename (name) + "); + } + + # Construct the properties table if required + unless ( 
$self->{tables}{$self->tablename("properties")} ) + { + my $tablename = $self->tablename("properties"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + key TEXT NOT NULL PRIMARY KEY, + value TEXT + ) + "); + } + + # Construct the commitmsgs table if required + unless ( $self->{tables}{$self->tablename("commitmsgs")} ) + { + my $tablename = $self->tablename("commitmsgs"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + key TEXT NOT NULL PRIMARY KEY, + value TEXT + ) + "); + } + + return $self; +} + +=head2 tablename + +=cut +sub tablename +{ + my $self = shift; + my $name = shift; + + if (exists $self->{valid_tables}{$name}) { + return $self->{dbtablenameprefix} . $name; + } else { + return undef; + } +} + +=head2 update + +=cut +sub update +{ + my $self = shift; + + # first lets get the commit list + $ENV{GIT_DIR} = $self->{git_path}; + + my $commitsha1 = `git rev-parse $self->{module}`; + chomp $commitsha1; + + my $commitinfo = `git cat-file commit $self->{module} 2>&1`; + unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ ) + { + die("Invalid module '$self->{module}'"); + } + + + my $git_log; + my $lastcommit = $self->_get_prop("last_commit"); + + if (defined $lastcommit && $lastcommit eq $commitsha1) { # up-to-date + return 1; + } + + # Start exclusive lock here... + $self->{dbh}->begin_work() or die "Cannot lock database for BEGIN"; + + # TODO: log processing is memory bound + # if we can parse into a 2nd file that is in reverse order + # we can probably do something really efficient + my @git_log_params = ('--pretty', '--parents', '--topo-order'); + + if (defined $lastcommit) { + push @git_log_params, "$lastcommit..$self->{module}"; + } else { + push @git_log_params, $self->{module}; + } + # git-rev-list is the backend / plumbing version of git-log + open(GITLOG, '-|', 'git', 'rev-list', @git_log_params) or die "Cannot call git-rev-list: $!"; + + my @commits; + + my %commit = (); + + while ( <GITLOG> ) + { + chomp; + if (m/^commit\s+(.*)$/) { + # on ^commit lines put the just seen commit in the stack + # and prime things for the next one + if (keys %commit) { + my %copy = %commit; + unshift @commits, \%copy; + %commit = (); + } + my @parents = split(m/\s+/, $1); + $commit{hash} = shift @parents; + $commit{parents} = \@parents; + } elsif (m/^(\w+?):\s+(.*)$/ && !exists($commit{message})) { + # on rfc822-like lines seen before we see any message, + # lowercase the entry and put it in the hash as key-value + $commit{lc($1)} = $2; + } else { + # message lines - skip initial empty line + # and trim whitespace + if (!exists($commit{message}) && m/^\s*$/) { + # define it to mark the end of headers + $commit{message} = ''; + next; + } + s/^\s+//; s/\s+$//; # trim ws + $commit{message} .= $_ . "\n"; + } + } + close GITLOG; + + unshift @commits, \%commit if ( keys %commit ); + + # Now all the commits are in the @commits bucket + # ordered by time DESC. 
for each commit that needs processing, + # determine whether it's following the last head we've seen or if + # it's on its own branch, grab a file list, and add whatever's changed + # NOTE: $lastcommit refers to the last commit from previous run + # $lastpicked is the last commit we picked in this run + my $lastpicked; + my $head = {}; + if (defined $lastcommit) { + $lastpicked = $lastcommit; + } + + my $committotal = scalar(@commits); + my $commitcount = 0; + + # Load the head table into $head (for cached lookups during the update process) + foreach my $file ( @{$self->gethead()} ) + { + $head->{$file->{name}} = $file; + } + + foreach my $commit ( @commits ) + { + $self->{log}->debug("GITCVS::updater - Processing commit $commit->{hash} (" . (++$commitcount) . " of $committotal)"); + if (defined $lastpicked) + { + if (!in_array($lastpicked, @{$commit->{parents}})) + { + # skip, we'll see this delta + # as part of a merge later + # warn "skipping off-track $commit->{hash}\n"; + next; + } elsif (@{$commit->{parents}} > 1) { + # it is a merge commit, for each parent that is + # not $lastpicked, see if we can get a log + # from the merge-base to that parent to put it + # in the message as a merge summary. + my @parents = @{$commit->{parents}}; + foreach my $parent (@parents) { + # git-merge-base can potentially (but rarely) throw + # several candidate merge bases. let's assume + # that the first one is the best one. + if ($parent eq $lastpicked) { + next; + } + my $base = eval { + safe_pipe_capture('git', 'merge-base', + $lastpicked, $parent); + }; + # The two branches may not be related at all, + # in which case merge base simply fails to find + # any, but that's Ok. + next if ($@); + + chomp $base; + if ($base) { + my @merged; + # print "want to log between $base $parent \n"; + open(GITLOG, '-|', 'git', 'log', '--pretty=medium', "$base..$parent") + or die "Cannot call git-log: $!"; + my $mergedhash; + while (<GITLOG>) { + chomp; + if (!defined $mergedhash) { + if (m/^commit\s+(.+)$/) { + $mergedhash = $1; + } else { + next; + } + } else { + # grab the first line that looks non-rfc822 + # aka has content after leading space + if (m/^\s+(\S.*)$/) { + my $title = $1; + $title = substr($title,0,100); # truncate + unshift @merged, "$mergedhash $title"; + undef $mergedhash; + } + } + } + close GITLOG; + if (@merged) { + $commit->{mergemsg} = $commit->{message}; + $commit->{mergemsg} .= "\nSummary of merged commits:\n\n"; + foreach my $summary (@merged) { + $commit->{mergemsg} .= "\t$summary\n"; + } + $commit->{mergemsg} .= "\n\n"; + # print "Message for $commit->{hash} \n$commit->{mergemsg}"; + } + } + } + } + } + + # convert the date to CVS-happy format + $commit->{date} = "$2 $1 $4 $3 $5" if ( $commit->{date} =~ /^\w+\s+(\w+)\s+(\d+)\s+(\d+:\d+:\d+)\s+(\d+)\s+([+-]\d+)$/ ); + + if ( defined ( $lastpicked ) ) + { + my $filepipe = open(FILELIST, '-|', 'git', 'diff-tree', '-z', '-r', $lastpicked, $commit->{hash}) or die("Cannot call git-diff-tree : $!"); + local ($/) = "\0"; + while ( <FILELIST> ) + { + chomp; + unless ( /^:\d{6}\s+\d{3}(\d)\d{2}\s+[a-zA-Z0-9]{40}\s+([a-zA-Z0-9]{40})\s+(\w)$/o ) + { + die("Couldn't process git-diff-tree line : $_"); + } + my ($mode, $hash, $change) = ($1, $2, $3); + my $name = <FILELIST>; + chomp($name); + + # $log->debug("File mode=$mode, hash=$hash, change=$change, name=$name"); + + my $git_perms = ""; + $git_perms .= "r" if ( $mode & 4 ); + $git_perms .= "w" if ( $mode & 2 ); + $git_perms .= "x" if ( $mode & 1 ); + $git_perms = "rw" if ( $git_perms eq "" ); + + if ( $change eq "D" ) + { + 
#$log->debug("DELETE $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} + 1, + filehash => "deleted", + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + elsif ( $change eq "M" || $change eq "T" ) + { + #$log->debug("MODIFIED $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} + 1, + filehash => $hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + elsif ( $change eq "A" ) + { + #$log->debug("ADDED $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} ? $head->{$name}{revision}+1 : 1, + filehash => $hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + else + { + $log->warn("UNKNOWN FILE CHANGE mode=$mode, hash=$hash, change=$change, name=$name"); + die; + } + } + close FILELIST; + } else { + # this is used to detect files removed from the repo + my $seen_files = {}; + + my $filepipe = open(FILELIST, '-|', 'git', 'ls-tree', '-z', '-r', $commit->{hash}) or die("Cannot call git-ls-tree : $!"); + local $/ = "\0"; + while ( ) + { + chomp; + unless ( /^(\d+)\s+(\w+)\s+([a-zA-Z0-9]+)\t(.*)$/o ) + { + die("Couldn't process git-ls-tree line : $_"); + } + + my ( $git_perms, $git_type, $git_hash, $git_filename ) = ( $1, $2, $3, $4 ); + + $seen_files->{$git_filename} = 1; + + my ( $oldhash, $oldrevision, $oldmode ) = ( + $head->{$git_filename}{filehash}, + $head->{$git_filename}{revision}, + $head->{$git_filename}{mode} + ); + + if ( $git_perms =~ /^\d\d\d(\d)\d\d/o ) + { + $git_perms = ""; + $git_perms .= "r" if ( $1 & 4 ); + $git_perms .= "w" if ( $1 & 2 ); + $git_perms .= "x" if ( $1 & 1 ); + } else { + $git_perms = "rw"; + } + + # unless the file exists with the same hash, we need to update it ... 
+ unless ( defined($oldhash) and $oldhash eq $git_hash and defined($oldmode) and $oldmode eq $git_perms ) + { + my $newrevision = ( $oldrevision or 0 ) + 1; + + $head->{$git_filename} = { + name => $git_filename, + revision => $newrevision, + filehash => $git_hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + + + $self->insert_rev($git_filename, $newrevision, $git_hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + } + close FILELIST; + + # Detect deleted files + foreach my $file ( keys %$head ) + { + unless ( exists $seen_files->{$file} or $head->{$file}{filehash} eq "deleted" ) + { + $head->{$file}{revision}++; + $head->{$file}{filehash} = "deleted"; + $head->{$file}{commithash} = $commit->{hash}; + $head->{$file}{modified} = $commit->{date}; + $head->{$file}{author} = $commit->{author}; + + $self->insert_rev($file, $head->{$file}{revision}, $head->{$file}{filehash}, $commit->{hash}, $commit->{date}, $commit->{author}, $head->{$file}{mode}); + } + } + # END : "Detect deleted files" + } + + + if (exists $commit->{mergemsg}) + { + $self->insert_mergelog($commit->{hash}, $commit->{mergemsg}); + } + + $lastpicked = $commit->{hash}; + + $self->_set_prop("last_commit", $commit->{hash}); + } + + $self->delete_head(); + foreach my $file ( keys %$head ) + { + $self->insert_head( + $file, + $head->{$file}{revision}, + $head->{$file}{filehash}, + $head->{$file}{commithash}, + $head->{$file}{modified}, + $head->{$file}{author}, + $head->{$file}{mode}, + ); + } + # invalidate the gethead cache + $self->{gethead_cache} = undef; + + + # Ending exclusive lock here + $self->{dbh}->commit() or die "Failed to commit changes to SQLite"; +} + +sub insert_rev +{ + my $self = shift; + my $name = shift; + my $revision = shift; + my $filehash = shift; + my $commithash = shift; + my $modified = shift; + my $author = shift; + my $mode = shift; + my $tablename = $self->tablename("revision"); + + my $insert_rev = $self->{dbh}->prepare_cached("INSERT INTO $tablename (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1); + $insert_rev->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode); +} + +sub insert_mergelog +{ + my $self = shift; + my $key = shift; + my $value = shift; + my $tablename = $self->tablename("commitmsgs"); + + my $insert_mergelog = $self->{dbh}->prepare_cached("INSERT INTO $tablename (key, value) VALUES (?,?)",{},1); + $insert_mergelog->execute($key, $value); +} + +sub delete_head +{ + my $self = shift; + my $tablename = $self->tablename("head"); + + my $delete_head = $self->{dbh}->prepare_cached("DELETE FROM $tablename",{},1); + $delete_head->execute(); +} + +sub insert_head +{ + my $self = shift; + my $name = shift; + my $revision = shift; + my $filehash = shift; + my $commithash = shift; + my $modified = shift; + my $author = shift; + my $mode = shift; + my $tablename = $self->tablename("head"); + + my $insert_head = $self->{dbh}->prepare_cached("INSERT INTO $tablename (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1); + $insert_head->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode); +} + +sub _headrev +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("head"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT filehash, revision, mode FROM $tablename WHERE name=?",{},1); + $db_query->execute($filename); + my ( $hash, 
$revision, $mode ) = $db_query->fetchrow_array; + + return ( $hash, $revision, $mode ); +} + +sub _get_prop +{ + my $self = shift; + my $key = shift; + my $tablename = $self->tablename("properties"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT value FROM $tablename WHERE key=?",{},1); + $db_query->execute($key); + my ( $value ) = $db_query->fetchrow_array; + + return $value; +} + +sub _set_prop +{ + my $self = shift; + my $key = shift; + my $value = shift; + my $tablename = $self->tablename("properties"); + + my $db_query = $self->{dbh}->prepare_cached("UPDATE $tablename SET value=? WHERE key=?",{},1); + $db_query->execute($value, $key); + + unless ( $db_query->rows ) + { + $db_query = $self->{dbh}->prepare_cached("INSERT INTO $tablename (key, value) VALUES (?,?)",{},1); + $db_query->execute($key, $value); + } + + return $value; +} + +=head2 gethead + +=cut + +sub gethead +{ + my $self = shift; + my $tablename = $self->tablename("head"); + + return $self->{gethead_cache} if ( defined ( $self->{gethead_cache} ) ); + + my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, mode, revision, modified, commithash, author FROM $tablename ORDER BY name ASC",{},1); + $db_query->execute(); + + my $tree = []; + while ( my $file = $db_query->fetchrow_hashref ) + { + push @$tree, $file; + } + + $self->{gethead_cache} = $tree; + + return $tree; +} + +=head2 getlog + +=cut + +sub getlog +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, author, mode, revision, modified, commithash FROM $tablename WHERE name=? ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + my $tree = []; + while ( my $file = $db_query->fetchrow_hashref ) + { + push @$tree, $file; + } + + return $tree; +} + +=head2 getmeta + +This function takes a filename (with path) argument and returns a hashref of +metadata for that file. + +=cut + +sub getmeta +{ + my $self = shift; + my $filename = shift; + my $revision = shift; + my $tablename_rev = $self->tablename("revision"); + my $tablename_head = $self->tablename("head"); + + my $db_query; + if ( defined($revision) and $revision =~ /^\d+$/ ) + { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_rev WHERE name=? AND revision=?",{},1); + $db_query->execute($filename, $revision); + } + elsif ( defined($revision) and $revision =~ /^[a-zA-Z0-9]{40}$/ ) + { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_rev WHERE name=? 
AND commithash=?",{},1); + $db_query->execute($filename, $revision); + } else { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_head WHERE name=?",{},1); + $db_query->execute($filename); + } + + return $db_query->fetchrow_hashref; +} + +=head2 commitmessage + +this function takes a commithash and returns the commit message for that commit + +=cut +sub commitmessage +{ + my $self = shift; + my $commithash = shift; + my $tablename = $self->tablename("commitmsgs"); + + die("Need commithash") unless ( defined($commithash) and $commithash =~ /^[a-zA-Z0-9]{40}$/ ); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT value FROM $tablename WHERE key=?",{},1); + $db_query->execute($commithash); + + my ( $message ) = $db_query->fetchrow_array; + + if ( defined ( $message ) ) + { + $message .= " " if ( $message =~ /\n$/ ); + return $message; + } + + my @lines = safe_pipe_capture("git", "cat-file", "commit", $commithash); + shift @lines while ( $lines[0] =~ /\S/ ); + $message = join("",@lines); + $message .= " " if ( $message =~ /\n$/ ); + return $message; +} + +=head2 gethistory + +This function takes a filename (with path) argument and returns an arrayofarrays +containing revision,filehash,commithash ordered by revision descending + +=cut +sub gethistory +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM $tablename WHERE name=? ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + return $db_query->fetchall_arrayref; +} + +=head2 gethistorydense + +This function takes a filename (with path) argument and returns an arrayofarrays +containing revision,filehash,commithash ordered by revision descending. + +This version of gethistory skips deleted entries -- so it is useful for annotate. +The 'dense' part is a reference to a '--dense' option available for git-rev-list +and other git tools that depend on it. + +=cut +sub gethistorydense +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM $tablename WHERE name=? AND filehash!='deleted' ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + return $db_query->fetchall_arrayref; +} + +=head2 in_array() + +from Array::PAT - mimics the in_array() function +found in PHP. Yuck but works for small arrays. + +=cut +sub in_array +{ + my ($check, @array) = @_; + my $retval = 0; + foreach my $test (@array){ + if($check eq $test){ + $retval = 1; + } + } + return $retval; +} + +=head2 safe_pipe_capture + +an alternative to `command` that allows input to be passed as an array +to work around shell problems with weird characters in arguments + +=cut +sub safe_pipe_capture { + + my @output; + + if (my $pid = open my $child, '-|') { + @output = (<$child>); + close $child or die join(' ',@_).": $! $?"; + } else { + exec(@_) or die "$! $?"; # exec() can fail the executable can't be found + } + return wantarray ? 
@output : join('',@output); +} + +=head2 mangle_dirname + +create a string from a directory name that is suitable to use as +part of a filename, mainly by converting all chars except \w.- to _ + +=cut +sub mangle_dirname { + my $dirname = shift; + return unless defined $dirname; + + $dirname =~ s/[^\w.-]/_/g; + + return $dirname; +} + +=head2 mangle_tablename + +create a string from a that is suitable to use as part of an SQL table +name, mainly by converting all chars except \w to _ + +=cut +sub mangle_tablename { + my $tablename = shift; + return unless defined $tablename; + + $tablename =~ s/[^\w_]/_/g; + + return $tablename; +} + +1; diff --git a/SparkleShare/Mac/git/bin/git-receive-pack b/SparkleShare/Mac/git/bin/git-receive-pack new file mode 120000 index 00000000..0899c299 --- /dev/null +++ b/SparkleShare/Mac/git/bin/git-receive-pack @@ -0,0 +1 @@ +git \ No newline at end of file diff --git a/SparkleShare/Mac/git/bin/git-shell b/SparkleShare/Mac/git/bin/git-shell new file mode 100755 index 00000000..647fe452 Binary files /dev/null and b/SparkleShare/Mac/git/bin/git-shell differ diff --git a/SparkleShare/Mac/git/bin/git-upload-archive b/SparkleShare/Mac/git/bin/git-upload-archive new file mode 120000 index 00000000..0899c299 --- /dev/null +++ b/SparkleShare/Mac/git/bin/git-upload-archive @@ -0,0 +1 @@ +git \ No newline at end of file diff --git a/SparkleShare/Mac/git/bin/git-upload-pack b/SparkleShare/Mac/git/bin/git-upload-pack new file mode 100755 index 00000000..3cc4e6db Binary files /dev/null and b/SparkleShare/Mac/git/bin/git-upload-pack differ diff --git a/SparkleShare/Mac/git/bin/gitk b/SparkleShare/Mac/git/bin/gitk new file mode 100755 index 00000000..4cde0c49 --- /dev/null +++ b/SparkleShare/Mac/git/bin/gitk @@ -0,0 +1,11702 @@ +#!/bin/sh +# Tcl ignores the next line -*- tcl -*- \ +exec wish "$0" -- "$@" + +# Copyright © 2005-2009 Paul Mackerras. All rights reserved. +# This program is free software; it may be used, copied, modified +# and distributed under the terms of the GNU General Public Licence, +# either version 2, or (at your option) any later version. + +package require Tk + +proc gitdir {} { + global env + if {[info exists env(GIT_DIR)]} { + return $env(GIT_DIR) + } else { + return [exec git rev-parse --git-dir] + } +} + +# A simple scheduler for compute-intensive stuff. +# The aim is to make sure that event handlers for GUI actions can +# run at least every 50-100 ms. Unfortunately fileevent handlers are +# run before X event handlers, so reading from a fast source can +# make the GUI completely unresponsive. 
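# An illustrative sketch of the time-slicing idea behind the run queue below,
# written in Perl like the rest of this bundle rather than in Tcl; the names
# (@runq, drain_runq, reschedule_idle) are invented for the example and are not
# part of gitk. Queued work items are drained until roughly 80 ms have passed,
# then the remainder is deferred so the GUI event loop gets a turn -- which is
# what dorunq does with "after idle".
use Time::HiRes qw(gettimeofday tv_interval);

our @runq;                       # queued work items (code refs)
sub reschedule_idle { }          # stand-in for the toolkit's idle callback

sub drain_runq
{
    my $t0 = [gettimeofday];
    while ( @runq )
    {
        my $job = shift @runq;
        $job->();                                        # run one unit of work
        last if ( tv_interval($t0) * 1000 >= 80 );       # yield after ~80 ms
    }
    reschedule_idle() if ( @runq );                      # finish the rest later
}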
+proc run args { + global isonrunq runq currunq + + set script $args + if {[info exists isonrunq($script)]} return + if {$runq eq {} && ![info exists currunq]} { + after idle dorunq + } + lappend runq [list {} $script] + set isonrunq($script) 1 +} + +proc filerun {fd script} { + fileevent $fd readable [list filereadable $fd $script] +} + +proc filereadable {fd script} { + global runq currunq + + fileevent $fd readable {} + if {$runq eq {} && ![info exists currunq]} { + after idle dorunq + } + lappend runq [list $fd $script] +} + +proc nukefile {fd} { + global runq + + for {set i 0} {$i < [llength $runq]} {} { + if {[lindex $runq $i 0] eq $fd} { + set runq [lreplace $runq $i $i] + } else { + incr i + } + } +} + +proc dorunq {} { + global isonrunq runq currunq + + set tstart [clock clicks -milliseconds] + set t0 $tstart + while {[llength $runq] > 0} { + set fd [lindex $runq 0 0] + set script [lindex $runq 0 1] + set currunq [lindex $runq 0] + set runq [lrange $runq 1 end] + set repeat [eval $script] + unset currunq + set t1 [clock clicks -milliseconds] + set t [expr {$t1 - $t0}] + if {$repeat ne {} && $repeat} { + if {$fd eq {} || $repeat == 2} { + # script returns 1 if it wants to be readded + # file readers return 2 if they could do more straight away + lappend runq [list $fd $script] + } else { + fileevent $fd readable [list filereadable $fd $script] + } + } elseif {$fd eq {}} { + unset isonrunq($script) + } + set t0 $t1 + if {$t1 - $tstart >= 80} break + } + if {$runq ne {}} { + after idle dorunq + } +} + +proc reg_instance {fd} { + global commfd leftover loginstance + + set i [incr loginstance] + set commfd($i) $fd + set leftover($i) {} + return $i +} + +proc unmerged_files {files} { + global nr_unmerged + + # find the list of unmerged files + set mlist {} + set nr_unmerged 0 + if {[catch { + set fd [open "| git ls-files -u" r] + } err]} { + show_error {} . "[mc "Couldn't get list of unmerged files:"] $err" + exit 1 + } + while {[gets $fd line] >= 0} { + set i [string first "\t" $line] + if {$i < 0} continue + set fname [string range $line [expr {$i+1}] end] + if {[lsearch -exact $mlist $fname] >= 0} continue + incr nr_unmerged + if {$files eq {} || [path_filter $files $fname]} { + lappend mlist $fname + } + } + catch {close $fd} + return $mlist +} + +proc parseviewargs {n arglist} { + global vdatemode vmergeonly vflags vdflags vrevs vfiltered vorigargs env + global worddiff git_version + + set vdatemode($n) 0 + set vmergeonly($n) 0 + set glflags {} + set diffargs {} + set nextisval 0 + set revargs {} + set origargs $arglist + set allknown 1 + set filtered 0 + set i -1 + foreach arg $arglist { + incr i + if {$nextisval} { + lappend glflags $arg + set nextisval 0 + continue + } + switch -glob -- $arg { + "-d" - + "--date-order" { + set vdatemode($n) 1 + # remove from origargs in case we hit an unknown option + set origargs [lreplace $origargs $i $i] + incr i -1 + } + "-[puabwcrRBMC]" - + "--no-renames" - "--full-index" - "--binary" - "--abbrev=*" - + "--find-copies-harder" - "-l*" - "--ext-diff" - "--no-ext-diff" - + "--src-prefix=*" - "--dst-prefix=*" - "--no-prefix" - + "-O*" - "--text" - "--full-diff" - "--ignore-space-at-eol" - + "--ignore-space-change" - "-U*" - "--unified=*" { + # These request or affect diff output, which we don't want. + # Some could be used to set our defaults for diff display. 
+ lappend diffargs $arg + } + "--raw" - "--patch-with-raw" - "--patch-with-stat" - + "--name-only" - "--name-status" - "--color" - + "--log-size" - "--pretty=*" - "--decorate" - "--abbrev-commit" - + "--cc" - "-z" - "--header" - "--parents" - "--boundary" - + "--no-color" - "-g" - "--walk-reflogs" - "--no-walk" - + "--timestamp" - "relative-date" - "--date=*" - "--stdin" - + "--objects" - "--objects-edge" - "--reverse" { + # These cause our parsing of git log's output to fail, or else + # they're options we want to set ourselves, so ignore them. + } + "--color-words*" - "--word-diff=color" { + # These trigger a word diff in the console interface, + # so help the user by enabling our own support + if {[package vcompare $git_version "1.7.2"] >= 0} { + set worddiff [mc "Color words"] + } + } + "--word-diff*" { + if {[package vcompare $git_version "1.7.2"] >= 0} { + set worddiff [mc "Markup words"] + } + } + "--stat=*" - "--numstat" - "--shortstat" - "--summary" - + "--check" - "--exit-code" - "--quiet" - "--topo-order" - + "--full-history" - "--dense" - "--sparse" - + "--follow" - "--left-right" - "--encoding=*" { + # These are harmless, and some are even useful + lappend glflags $arg + } + "--diff-filter=*" - "--no-merges" - "--unpacked" - + "--max-count=*" - "--skip=*" - "--since=*" - "--after=*" - + "--until=*" - "--before=*" - "--max-age=*" - "--min-age=*" - + "--author=*" - "--committer=*" - "--grep=*" - "-[iE]" - + "--remove-empty" - "--first-parent" - "--cherry-pick" - + "-S*" - "--pickaxe-all" - "--pickaxe-regex" - + "--simplify-by-decoration" { + # These mean that we get a subset of the commits + set filtered 1 + lappend glflags $arg + } + "-n" { + # This appears to be the only one that has a value as a + # separate word following it + set filtered 1 + set nextisval 1 + lappend glflags $arg + } + "--not" - "--all" { + lappend revargs $arg + } + "--merge" { + set vmergeonly($n) 1 + # git rev-parse doesn't understand --merge + lappend revargs --gitk-symmetric-diff-marker MERGE_HEAD...HEAD + } + "--no-replace-objects" { + set env(GIT_NO_REPLACE_OBJECTS) "1" + } + "-*" { + # Other flag arguments including - + if {[string is digit -strict [string range $arg 1 end]]} { + set filtered 1 + } else { + # a flag argument that we don't recognize; + # that means we can't optimize + set allknown 0 + } + lappend glflags $arg + } + default { + # Non-flag arguments specify commits or ranges of commits + if {[string match "*...*" $arg]} { + lappend revargs --gitk-symmetric-diff-marker + } + lappend revargs $arg + } + } + } + set vdflags($n) $diffargs + set vflags($n) $glflags + set vrevs($n) $revargs + set vfiltered($n) $filtered + set vorigargs($n) $origargs + return $allknown +} + +proc parseviewrevs {view revs} { + global vposids vnegids + + if {$revs eq {}} { + set revs HEAD + } + if {[catch {set ids [eval exec git rev-parse $revs]} err]} { + # we get stdout followed by stderr in $err + # for an unknown rev, git rev-parse echoes it and then errors out + set errlines [split $err "\n"] + set badrev {} + for {set l 0} {$l < [llength $errlines]} {incr l} { + set line [lindex $errlines $l] + if {!([string length $line] == 40 && [string is xdigit $line])} { + if {[string match "fatal:*" $line]} { + if {[string match "fatal: ambiguous argument*" $line] + && $badrev ne {}} { + if {[llength $badrev] == 1} { + set err "unknown revision $badrev" + } else { + set err "unknown revisions: [join $badrev ", "]" + } + } else { + set err [join [lrange $errlines $l end] "\n"] + } + break + } + lappend badrev $line + 
} + } + error_popup "[mc "Error parsing revisions:"] $err" + return {} + } + set ret {} + set pos {} + set neg {} + set sdm 0 + foreach id [split $ids "\n"] { + if {$id eq "--gitk-symmetric-diff-marker"} { + set sdm 4 + } elseif {[string match "^*" $id]} { + if {$sdm != 1} { + lappend ret $id + if {$sdm == 3} { + set sdm 0 + } + } + lappend neg [string range $id 1 end] + } else { + if {$sdm != 2} { + lappend ret $id + } else { + lset ret end $id...[lindex $ret end] + } + lappend pos $id + } + incr sdm -1 + } + set vposids($view) $pos + set vnegids($view) $neg + return $ret +} + +# Start off a git log process and arrange to read its output +proc start_rev_list {view} { + global startmsecs commitidx viewcomplete curview + global tclencoding + global viewargs viewargscmd viewfiles vfilelimit + global showlocalchanges + global viewactive viewinstances vmergeonly + global mainheadid viewmainheadid viewmainheadid_orig + global vcanopt vflags vrevs vorigargs + global show_notes + + set startmsecs [clock clicks -milliseconds] + set commitidx($view) 0 + # these are set this way for the error exits + set viewcomplete($view) 1 + set viewactive($view) 0 + varcinit $view + + set args $viewargs($view) + if {$viewargscmd($view) ne {}} { + if {[catch { + set str [exec sh -c $viewargscmd($view)] + } err]} { + error_popup "[mc "Error executing --argscmd command:"] $err" + return 0 + } + set args [concat $args [split $str "\n"]] + } + set vcanopt($view) [parseviewargs $view $args] + + set files $viewfiles($view) + if {$vmergeonly($view)} { + set files [unmerged_files $files] + if {$files eq {}} { + global nr_unmerged + if {$nr_unmerged == 0} { + error_popup [mc "No files selected: --merge specified but\ + no files are unmerged."] + } else { + error_popup [mc "No files selected: --merge specified but\ + no unmerged files are within file limit."] + } + return 0 + } + } + set vfilelimit($view) $files + + if {$vcanopt($view)} { + set revs [parseviewrevs $view $vrevs($view)] + if {$revs eq {}} { + return 0 + } + set args [concat $vflags($view) $revs] + } else { + set args $vorigargs($view) + } + + if {[catch { + set fd [open [concat | git log --no-color -z --pretty=raw $show_notes \ + --parents --boundary $args "--" $files] r] + } err]} { + error_popup "[mc "Error executing git log:"] $err" + return 0 + } + set i [reg_instance $fd] + set viewinstances($view) [list $i] + set viewmainheadid($view) $mainheadid + set viewmainheadid_orig($view) $mainheadid + if {$files ne {} && $mainheadid ne {}} { + get_viewmainhead $view + } + if {$showlocalchanges && $viewmainheadid($view) ne {}} { + interestedin $viewmainheadid($view) dodiffindex + } + fconfigure $fd -blocking 0 -translation lf -eofchar {} + if {$tclencoding != {}} { + fconfigure $fd -encoding $tclencoding + } + filerun $fd [list getcommitlines $fd $i $view 0] + nowbusy $view [mc "Reading"] + set viewcomplete($view) 0 + set viewactive($view) 1 + return 1 +} + +proc stop_instance {inst} { + global commfd leftover + + set fd $commfd($inst) + catch { + set pid [pid $fd] + + if {$::tcl_platform(platform) eq {windows}} { + exec kill -f $pid + } else { + exec kill $pid + } + } + catch {close $fd} + nukefile $fd + unset commfd($inst) + unset leftover($inst) +} + +proc stop_backends {} { + global commfd + + foreach inst [array names commfd] { + stop_instance $inst + } +} + +proc stop_rev_list {view} { + global viewinstances + + foreach inst $viewinstances($view) { + stop_instance $inst + } + set viewinstances($view) {} +} + +proc reset_pending_select {selid} { + global 
pending_select mainheadid selectheadid + + if {$selid ne {}} { + set pending_select $selid + } elseif {$selectheadid ne {}} { + set pending_select $selectheadid + } else { + set pending_select $mainheadid + } +} + +proc getcommits {selid} { + global canv curview need_redisplay viewactive + + initlayout + if {[start_rev_list $curview]} { + reset_pending_select $selid + show_status [mc "Reading commits..."] + set need_redisplay 1 + } else { + show_status [mc "No commits selected"] + } +} + +proc updatecommits {} { + global curview vcanopt vorigargs vfilelimit viewinstances + global viewactive viewcomplete tclencoding + global startmsecs showneartags showlocalchanges + global mainheadid viewmainheadid viewmainheadid_orig pending_select + global isworktree + global varcid vposids vnegids vflags vrevs + global show_notes + + set isworktree [expr {[exec git rev-parse --is-inside-work-tree] == "true"}] + rereadrefs + set view $curview + if {$mainheadid ne $viewmainheadid_orig($view)} { + if {$showlocalchanges} { + dohidelocalchanges + } + set viewmainheadid($view) $mainheadid + set viewmainheadid_orig($view) $mainheadid + if {$vfilelimit($view) ne {}} { + get_viewmainhead $view + } + } + if {$showlocalchanges} { + doshowlocalchanges + } + if {$vcanopt($view)} { + set oldpos $vposids($view) + set oldneg $vnegids($view) + set revs [parseviewrevs $view $vrevs($view)] + if {$revs eq {}} { + return + } + # note: getting the delta when negative refs change is hard, + # and could require multiple git log invocations, so in that + # case we ask git log for all the commits (not just the delta) + if {$oldneg eq $vnegids($view)} { + set newrevs {} + set npos 0 + # take out positive refs that we asked for before or + # that we have already seen + foreach rev $revs { + if {[string length $rev] == 40} { + if {[lsearch -exact $oldpos $rev] < 0 + && ![info exists varcid($view,$rev)]} { + lappend newrevs $rev + incr npos + } + } else { + lappend $newrevs $rev + } + } + if {$npos == 0} return + set revs $newrevs + set vposids($view) [lsort -unique [concat $oldpos $vposids($view)]] + } + set args [concat $vflags($view) $revs --not $oldpos] + } else { + set args $vorigargs($view) + } + if {[catch { + set fd [open [concat | git log --no-color -z --pretty=raw $show_notes \ + --parents --boundary $args "--" $vfilelimit($view)] r] + } err]} { + error_popup "[mc "Error executing git log:"] $err" + return + } + if {$viewactive($view) == 0} { + set startmsecs [clock clicks -milliseconds] + } + set i [reg_instance $fd] + lappend viewinstances($view) $i + fconfigure $fd -blocking 0 -translation lf -eofchar {} + if {$tclencoding != {}} { + fconfigure $fd -encoding $tclencoding + } + filerun $fd [list getcommitlines $fd $i $view 1] + incr viewactive($view) + set viewcomplete($view) 0 + reset_pending_select {} + nowbusy $view [mc "Reading"] + if {$showneartags} { + getallcommits + } +} + +proc reloadcommits {} { + global curview viewcomplete selectedline currentid thickerline + global showneartags treediffs commitinterest cached_commitrow + global targetid + + set selid {} + if {$selectedline ne {}} { + set selid $currentid + } + + if {!$viewcomplete($curview)} { + stop_rev_list $curview + } + resetvarcs $curview + set selectedline {} + catch {unset currentid} + catch {unset thickerline} + catch {unset treediffs} + readrefs + changedrefs + if {$showneartags} { + getallcommits + } + clear_display + catch {unset commitinterest} + catch {unset cached_commitrow} + catch {unset targetid} + setcanvscroll + getcommits $selid + return 
0 +} + +# This makes a string representation of a positive integer which +# sorts as a string in numerical order +proc strrep {n} { + if {$n < 16} { + return [format "%x" $n] + } elseif {$n < 256} { + return [format "x%.2x" $n] + } elseif {$n < 65536} { + return [format "y%.4x" $n] + } + return [format "z%.8x" $n] +} + +# Procedures used in reordering commits from git log (without +# --topo-order) into the order for display. + +proc varcinit {view} { + global varcstart vupptr vdownptr vleftptr vbackptr varctok varcrow + global vtokmod varcmod vrowmod varcix vlastins + + set varcstart($view) {{}} + set vupptr($view) {0} + set vdownptr($view) {0} + set vleftptr($view) {0} + set vbackptr($view) {0} + set varctok($view) {{}} + set varcrow($view) {{}} + set vtokmod($view) {} + set varcmod($view) 0 + set vrowmod($view) 0 + set varcix($view) {{}} + set vlastins($view) {0} +} + +proc resetvarcs {view} { + global varcid varccommits parents children vseedcount ordertok + + foreach vid [array names varcid $view,*] { + unset varcid($vid) + unset children($vid) + unset parents($vid) + } + # some commits might have children but haven't been seen yet + foreach vid [array names children $view,*] { + unset children($vid) + } + foreach va [array names varccommits $view,*] { + unset varccommits($va) + } + foreach vd [array names vseedcount $view,*] { + unset vseedcount($vd) + } + catch {unset ordertok} +} + +# returns a list of the commits with no children +proc seeds {v} { + global vdownptr vleftptr varcstart + + set ret {} + set a [lindex $vdownptr($v) 0] + while {$a != 0} { + lappend ret [lindex $varcstart($v) $a] + set a [lindex $vleftptr($v) $a] + } + return $ret +} + +proc newvarc {view id} { + global varcid varctok parents children vdatemode + global vupptr vdownptr vleftptr vbackptr varcrow varcix varcstart + global commitdata commitinfo vseedcount varccommits vlastins + + set a [llength $varctok($view)] + set vid $view,$id + if {[llength $children($vid)] == 0 || $vdatemode($view)} { + if {![info exists commitinfo($id)]} { + parsecommit $id $commitdata($id) 1 + } + set cdate [lindex $commitinfo($id) 4] + if {![string is integer -strict $cdate]} { + set cdate 0 + } + if {![info exists vseedcount($view,$cdate)]} { + set vseedcount($view,$cdate) -1 + } + set c [incr vseedcount($view,$cdate)] + set cdate [expr {$cdate ^ 0xffffffff}] + set tok "s[strrep $cdate][strrep $c]" + } else { + set tok {} + } + set ka 0 + if {[llength $children($vid)] > 0} { + set kid [lindex $children($vid) end] + set k $varcid($view,$kid) + if {[string compare [lindex $varctok($view) $k] $tok] > 0} { + set ki $kid + set ka $k + set tok [lindex $varctok($view) $k] + } + } + if {$ka != 0} { + set i [lsearch -exact $parents($view,$ki) $id] + set j [expr {[llength $parents($view,$ki)] - 1 - $i}] + append tok [strrep $j] + } + set c [lindex $vlastins($view) $ka] + if {$c == 0 || [string compare $tok [lindex $varctok($view) $c]] < 0} { + set c $ka + set b [lindex $vdownptr($view) $ka] + } else { + set b [lindex $vleftptr($view) $c] + } + while {$b != 0 && [string compare $tok [lindex $varctok($view) $b]] >= 0} { + set c $b + set b [lindex $vleftptr($view) $c] + } + if {$c == $ka} { + lset vdownptr($view) $ka $a + lappend vbackptr($view) 0 + } else { + lset vleftptr($view) $c $a + lappend vbackptr($view) $c + } + lset vlastins($view) $ka $a + lappend vupptr($view) $ka + lappend vleftptr($view) $b + if {$b != 0} { + lset vbackptr($view) $b $a + } + lappend varctok($view) $tok + lappend varcstart($view) $id + lappend vdownptr($view) 0 + 
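+    # row number and arc index for the new arc are not known yet; only
+    # placeholders are appended here and update_arcrows fills them in later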
lappend varcrow($view) {} + lappend varcix($view) {} + set varccommits($view,$a) {} + lappend vlastins($view) 0 + return $a +} + +proc splitvarc {p v} { + global varcid varcstart varccommits varctok vtokmod + global vupptr vdownptr vleftptr vbackptr varcix varcrow vlastins + + set oa $varcid($v,$p) + set otok [lindex $varctok($v) $oa] + set ac $varccommits($v,$oa) + set i [lsearch -exact $varccommits($v,$oa) $p] + if {$i <= 0} return + set na [llength $varctok($v)] + # "%" sorts before "0"... + set tok "$otok%[strrep $i]" + lappend varctok($v) $tok + lappend varcrow($v) {} + lappend varcix($v) {} + set varccommits($v,$oa) [lrange $ac 0 [expr {$i - 1}]] + set varccommits($v,$na) [lrange $ac $i end] + lappend varcstart($v) $p + foreach id $varccommits($v,$na) { + set varcid($v,$id) $na + } + lappend vdownptr($v) [lindex $vdownptr($v) $oa] + lappend vlastins($v) [lindex $vlastins($v) $oa] + lset vdownptr($v) $oa $na + lset vlastins($v) $oa 0 + lappend vupptr($v) $oa + lappend vleftptr($v) 0 + lappend vbackptr($v) 0 + for {set b [lindex $vdownptr($v) $na]} {$b != 0} {set b [lindex $vleftptr($v) $b]} { + lset vupptr($v) $b $na + } + if {[string compare $otok $vtokmod($v)] <= 0} { + modify_arc $v $oa + } +} + +proc renumbervarc {a v} { + global parents children varctok varcstart varccommits + global vupptr vdownptr vleftptr vbackptr vlastins varcid vtokmod vdatemode + + set t1 [clock clicks -milliseconds] + set todo {} + set isrelated($a) 1 + set kidchanged($a) 1 + set ntot 0 + while {$a != 0} { + if {[info exists isrelated($a)]} { + lappend todo $a + set id [lindex $varccommits($v,$a) end] + foreach p $parents($v,$id) { + if {[info exists varcid($v,$p)]} { + set isrelated($varcid($v,$p)) 1 + } + } + } + incr ntot + set b [lindex $vdownptr($v) $a] + if {$b == 0} { + while {$a != 0} { + set b [lindex $vleftptr($v) $a] + if {$b != 0} break + set a [lindex $vupptr($v) $a] + } + } + set a $b + } + foreach a $todo { + if {![info exists kidchanged($a)]} continue + set id [lindex $varcstart($v) $a] + if {[llength $children($v,$id)] > 1} { + set children($v,$id) [lsort -command [list vtokcmp $v] \ + $children($v,$id)] + } + set oldtok [lindex $varctok($v) $a] + if {!$vdatemode($v)} { + set tok {} + } else { + set tok $oldtok + } + set ka 0 + set kid [last_real_child $v,$id] + if {$kid ne {}} { + set k $varcid($v,$kid) + if {[string compare [lindex $varctok($v) $k] $tok] > 0} { + set ki $kid + set ka $k + set tok [lindex $varctok($v) $k] + } + } + if {$ka != 0} { + set i [lsearch -exact $parents($v,$ki) $id] + set j [expr {[llength $parents($v,$ki)] - 1 - $i}] + append tok [strrep $j] + } + if {$tok eq $oldtok} { + continue + } + set id [lindex $varccommits($v,$a) end] + foreach p $parents($v,$id) { + if {[info exists varcid($v,$p)]} { + set kidchanged($varcid($v,$p)) 1 + } else { + set sortkids($p) 1 + } + } + lset varctok($v) $a $tok + set b [lindex $vupptr($v) $a] + if {$b != $ka} { + if {[string compare [lindex $varctok($v) $ka] $vtokmod($v)] < 0} { + modify_arc $v $ka + } + if {[string compare [lindex $varctok($v) $b] $vtokmod($v)] < 0} { + modify_arc $v $b + } + set c [lindex $vbackptr($v) $a] + set d [lindex $vleftptr($v) $a] + if {$c == 0} { + lset vdownptr($v) $b $d + } else { + lset vleftptr($v) $c $d + } + if {$d != 0} { + lset vbackptr($v) $d $c + } + if {[lindex $vlastins($v) $b] == $a} { + lset vlastins($v) $b $c + } + lset vupptr($v) $a $ka + set c [lindex $vlastins($v) $ka] + if {$c == 0 || \ + [string compare $tok [lindex $varctok($v) $c]] < 0} { + set c $ka + set b [lindex $vdownptr($v) 
$ka] + } else { + set b [lindex $vleftptr($v) $c] + } + while {$b != 0 && \ + [string compare $tok [lindex $varctok($v) $b]] >= 0} { + set c $b + set b [lindex $vleftptr($v) $c] + } + if {$c == $ka} { + lset vdownptr($v) $ka $a + lset vbackptr($v) $a 0 + } else { + lset vleftptr($v) $c $a + lset vbackptr($v) $a $c + } + lset vleftptr($v) $a $b + if {$b != 0} { + lset vbackptr($v) $b $a + } + lset vlastins($v) $ka $a + } + } + foreach id [array names sortkids] { + if {[llength $children($v,$id)] > 1} { + set children($v,$id) [lsort -command [list vtokcmp $v] \ + $children($v,$id)] + } + } + set t2 [clock clicks -milliseconds] + #puts "renumbervarc did [llength $todo] of $ntot arcs in [expr {$t2-$t1}]ms" +} + +# Fix up the graph after we have found out that in view $v, +# $p (a commit that we have already seen) is actually the parent +# of the last commit in arc $a. +proc fix_reversal {p a v} { + global varcid varcstart varctok vupptr + + set pa $varcid($v,$p) + if {$p ne [lindex $varcstart($v) $pa]} { + splitvarc $p $v + set pa $varcid($v,$p) + } + # seeds always need to be renumbered + if {[lindex $vupptr($v) $pa] == 0 || + [string compare [lindex $varctok($v) $a] \ + [lindex $varctok($v) $pa]] > 0} { + renumbervarc $pa $v + } +} + +proc insertrow {id p v} { + global cmitlisted children parents varcid varctok vtokmod + global varccommits ordertok commitidx numcommits curview + global targetid targetrow + + readcommit $id + set vid $v,$id + set cmitlisted($vid) 1 + set children($vid) {} + set parents($vid) [list $p] + set a [newvarc $v $id] + set varcid($vid) $a + if {[string compare [lindex $varctok($v) $a] $vtokmod($v)] < 0} { + modify_arc $v $a + } + lappend varccommits($v,$a) $id + set vp $v,$p + if {[llength [lappend children($vp) $id]] > 1} { + set children($vp) [lsort -command [list vtokcmp $v] $children($vp)] + catch {unset ordertok} + } + fix_reversal $p $a $v + incr commitidx($v) + if {$v == $curview} { + set numcommits $commitidx($v) + setcanvscroll + if {[info exists targetid]} { + if {![comes_before $targetid $p]} { + incr targetrow + } + } + } +} + +proc insertfakerow {id p} { + global varcid varccommits parents children cmitlisted + global commitidx varctok vtokmod targetid targetrow curview numcommits + + set v $curview + set a $varcid($v,$p) + set i [lsearch -exact $varccommits($v,$a) $p] + if {$i < 0} { + puts "oops: insertfakerow can't find [shortids $p] on arc $a" + return + } + set children($v,$id) {} + set parents($v,$id) [list $p] + set varcid($v,$id) $a + lappend children($v,$p) $id + set cmitlisted($v,$id) 1 + set numcommits [incr commitidx($v)] + # note we deliberately don't update varcstart($v) even if $i == 0 + set varccommits($v,$a) [linsert $varccommits($v,$a) $i $id] + modify_arc $v $a $i + if {[info exists targetid]} { + if {![comes_before $targetid $p]} { + incr targetrow + } + } + setcanvscroll + drawvisible +} + +proc removefakerow {id} { + global varcid varccommits parents children commitidx + global varctok vtokmod cmitlisted currentid selectedline + global targetid curview numcommits + + set v $curview + if {[llength $parents($v,$id)] != 1} { + puts "oops: removefakerow [shortids $id] has [llength $parents($v,$id)] parents" + return + } + set p [lindex $parents($v,$id) 0] + set a $varcid($v,$id) + set i [lsearch -exact $varccommits($v,$a) $id] + if {$i < 0} { + puts "oops: removefakerow can't find [shortids $id] on arc $a" + return + } + unset varcid($v,$id) + set varccommits($v,$a) [lreplace $varccommits($v,$a) $i $i] + unset parents($v,$id) + unset 
children($v,$id) + unset cmitlisted($v,$id) + set numcommits [incr commitidx($v) -1] + set j [lsearch -exact $children($v,$p) $id] + if {$j >= 0} { + set children($v,$p) [lreplace $children($v,$p) $j $j] + } + modify_arc $v $a $i + if {[info exist currentid] && $id eq $currentid} { + unset currentid + set selectedline {} + } + if {[info exists targetid] && $targetid eq $id} { + set targetid $p + } + setcanvscroll + drawvisible +} + +proc real_children {vp} { + global children nullid nullid2 + + set kids {} + foreach id $children($vp) { + if {$id ne $nullid && $id ne $nullid2} { + lappend kids $id + } + } + return $kids +} + +proc first_real_child {vp} { + global children nullid nullid2 + + foreach id $children($vp) { + if {$id ne $nullid && $id ne $nullid2} { + return $id + } + } + return {} +} + +proc last_real_child {vp} { + global children nullid nullid2 + + set kids $children($vp) + for {set i [llength $kids]} {[incr i -1] >= 0} {} { + set id [lindex $kids $i] + if {$id ne $nullid && $id ne $nullid2} { + return $id + } + } + return {} +} + +proc vtokcmp {v a b} { + global varctok varcid + + return [string compare [lindex $varctok($v) $varcid($v,$a)] \ + [lindex $varctok($v) $varcid($v,$b)]] +} + +# This assumes that if lim is not given, the caller has checked that +# arc a's token is less than $vtokmod($v) +proc modify_arc {v a {lim {}}} { + global varctok vtokmod varcmod varcrow vupptr curview vrowmod varccommits + + if {$lim ne {}} { + set c [string compare [lindex $varctok($v) $a] $vtokmod($v)] + if {$c > 0} return + if {$c == 0} { + set r [lindex $varcrow($v) $a] + if {$r ne {} && $vrowmod($v) <= $r + $lim} return + } + } + set vtokmod($v) [lindex $varctok($v) $a] + set varcmod($v) $a + if {$v == $curview} { + while {$a != 0 && [lindex $varcrow($v) $a] eq {}} { + set a [lindex $vupptr($v) $a] + set lim {} + } + set r 0 + if {$a != 0} { + if {$lim eq {}} { + set lim [llength $varccommits($v,$a)] + } + set r [expr {[lindex $varcrow($v) $a] + $lim}] + } + set vrowmod($v) $r + undolayout $r + } +} + +proc update_arcrows {v} { + global vtokmod varcmod vrowmod varcrow commitidx currentid selectedline + global varcid vrownum varcorder varcix varccommits + global vupptr vdownptr vleftptr varctok + global displayorder parentlist curview cached_commitrow + + if {$vrowmod($v) == $commitidx($v)} return + if {$v == $curview} { + if {[llength $displayorder] > $vrowmod($v)} { + set displayorder [lrange $displayorder 0 [expr {$vrowmod($v) - 1}]] + set parentlist [lrange $parentlist 0 [expr {$vrowmod($v) - 1}]] + } + catch {unset cached_commitrow} + } + set narctot [expr {[llength $varctok($v)] - 1}] + set a $varcmod($v) + while {$a != 0 && [lindex $varcix($v) $a] eq {}} { + # go up the tree until we find something that has a row number, + # or we get to a seed + set a [lindex $vupptr($v) $a] + } + if {$a == 0} { + set a [lindex $vdownptr($v) 0] + if {$a == 0} return + set vrownum($v) {0} + set varcorder($v) [list $a] + lset varcix($v) $a 0 + lset varcrow($v) $a 0 + set arcn 0 + set row 0 + } else { + set arcn [lindex $varcix($v) $a] + if {[llength $vrownum($v)] > $arcn + 1} { + set vrownum($v) [lrange $vrownum($v) 0 $arcn] + set varcorder($v) [lrange $varcorder($v) 0 $arcn] + } + set row [lindex $varcrow($v) $a] + } + while {1} { + set p $a + incr row [llength $varccommits($v,$a)] + # go down if possible + set b [lindex $vdownptr($v) $a] + if {$b == 0} { + # if not, go left, or go up until we can go left + while {$a != 0} { + set b [lindex $vleftptr($v) $a] + if {$b != 0} break + set a [lindex 
$vupptr($v) $a] + } + if {$a == 0} break + } + set a $b + incr arcn + lappend vrownum($v) $row + lappend varcorder($v) $a + lset varcix($v) $a $arcn + lset varcrow($v) $a $row + } + set vtokmod($v) [lindex $varctok($v) $p] + set varcmod($v) $p + set vrowmod($v) $row + if {[info exists currentid]} { + set selectedline [rowofcommit $currentid] + } +} + +# Test whether view $v contains commit $id +proc commitinview {id v} { + global varcid + + return [info exists varcid($v,$id)] +} + +# Return the row number for commit $id in the current view +proc rowofcommit {id} { + global varcid varccommits varcrow curview cached_commitrow + global varctok vtokmod + + set v $curview + if {![info exists varcid($v,$id)]} { + puts "oops rowofcommit no arc for [shortids $id]" + return {} + } + set a $varcid($v,$id) + if {[string compare [lindex $varctok($v) $a] $vtokmod($v)] >= 0} { + update_arcrows $v + } + if {[info exists cached_commitrow($id)]} { + return $cached_commitrow($id) + } + set i [lsearch -exact $varccommits($v,$a) $id] + if {$i < 0} { + puts "oops didn't find commit [shortids $id] in arc $a" + return {} + } + incr i [lindex $varcrow($v) $a] + set cached_commitrow($id) $i + return $i +} + +# Returns 1 if a is on an earlier row than b, otherwise 0 +proc comes_before {a b} { + global varcid varctok curview + + set v $curview + if {$a eq $b || ![info exists varcid($v,$a)] || \ + ![info exists varcid($v,$b)]} { + return 0 + } + if {$varcid($v,$a) != $varcid($v,$b)} { + return [expr {[string compare [lindex $varctok($v) $varcid($v,$a)] \ + [lindex $varctok($v) $varcid($v,$b)]] < 0}] + } + return [expr {[rowofcommit $a] < [rowofcommit $b]}] +} + +proc bsearch {l elt} { + if {[llength $l] == 0 || $elt <= [lindex $l 0]} { + return 0 + } + set lo 0 + set hi [llength $l] + while {$hi - $lo > 1} { + set mid [expr {int(($lo + $hi) / 2)}] + set t [lindex $l $mid] + if {$elt < $t} { + set hi $mid + } elseif {$elt > $t} { + set lo $mid + } else { + return $mid + } + } + return $lo +} + +# Make sure rows $start..$end-1 are valid in displayorder and parentlist +proc make_disporder {start end} { + global vrownum curview commitidx displayorder parentlist + global varccommits varcorder parents vrowmod varcrow + global d_valid_start d_valid_end + + if {$end > $vrowmod($curview)} { + update_arcrows $curview + } + set ai [bsearch $vrownum($curview) $start] + set start [lindex $vrownum($curview) $ai] + set narc [llength $vrownum($curview)] + for {set r $start} {$ai < $narc && $r < $end} {incr ai} { + set a [lindex $varcorder($curview) $ai] + set l [llength $displayorder] + set al [llength $varccommits($curview,$a)] + if {$l < $r + $al} { + if {$l < $r} { + set pad [ntimes [expr {$r - $l}] {}] + set displayorder [concat $displayorder $pad] + set parentlist [concat $parentlist $pad] + } elseif {$l > $r} { + set displayorder [lrange $displayorder 0 [expr {$r - 1}]] + set parentlist [lrange $parentlist 0 [expr {$r - 1}]] + } + foreach id $varccommits($curview,$a) { + lappend displayorder $id + lappend parentlist $parents($curview,$id) + } + } elseif {[lindex $displayorder [expr {$r + $al - 1}]] eq {}} { + set i $r + foreach id $varccommits($curview,$a) { + lset displayorder $i $id + lset parentlist $i $parents($curview,$id) + incr i + } + } + incr r $al + } +} + +proc commitonrow {row} { + global displayorder + + set id [lindex $displayorder $row] + if {$id eq {}} { + make_disporder $row [expr {$row + 1}] + set id [lindex $displayorder $row] + } + return $id +} + +proc closevarcs {v} { + global varctok varccommits varcid 
parents children + global cmitlisted commitidx vtokmod + + set missing_parents 0 + set scripts {} + set narcs [llength $varctok($v)] + for {set a 1} {$a < $narcs} {incr a} { + set id [lindex $varccommits($v,$a) end] + foreach p $parents($v,$id) { + if {[info exists varcid($v,$p)]} continue + # add p as a new commit + incr missing_parents + set cmitlisted($v,$p) 0 + set parents($v,$p) {} + if {[llength $children($v,$p)] == 1 && + [llength $parents($v,$id)] == 1} { + set b $a + } else { + set b [newvarc $v $p] + } + set varcid($v,$p) $b + if {[string compare [lindex $varctok($v) $b] $vtokmod($v)] < 0} { + modify_arc $v $b + } + lappend varccommits($v,$b) $p + incr commitidx($v) + set scripts [check_interest $p $scripts] + } + } + if {$missing_parents > 0} { + foreach s $scripts { + eval $s + } + } +} + +# Use $rwid as a substitute for $id, i.e. reparent $id's children to $rwid +# Assumes we already have an arc for $rwid. +proc rewrite_commit {v id rwid} { + global children parents varcid varctok vtokmod varccommits + + foreach ch $children($v,$id) { + # make $rwid be $ch's parent in place of $id + set i [lsearch -exact $parents($v,$ch) $id] + if {$i < 0} { + puts "oops rewrite_commit didn't find $id in parent list for $ch" + } + set parents($v,$ch) [lreplace $parents($v,$ch) $i $i $rwid] + # add $ch to $rwid's children and sort the list if necessary + if {[llength [lappend children($v,$rwid) $ch]] > 1} { + set children($v,$rwid) [lsort -command [list vtokcmp $v] \ + $children($v,$rwid)] + } + # fix the graph after joining $id to $rwid + set a $varcid($v,$ch) + fix_reversal $rwid $a $v + # parentlist is wrong for the last element of arc $a + # even if displayorder is right, hence the 3rd arg here + modify_arc $v $a [expr {[llength $varccommits($v,$a)] - 1}] + } +} + +# Mechanism for registering a command to be executed when we come +# across a particular commit. To handle the case when only the +# prefix of the commit is known, the commitinterest array is now +# indexed by the first 4 characters of the ID. Each element is a +# list of id, cmd pairs. +proc interestedin {id cmd} { + global commitinterest + + lappend commitinterest([string range $id 0 3]) $id $cmd +} + +proc check_interest {id scripts} { + global commitinterest + + set prefix [string range $id 0 3] + if {[info exists commitinterest($prefix)]} { + set newlist {} + foreach {i script} $commitinterest($prefix) { + if {[string match "$i*" $id]} { + lappend scripts [string map [list "%I" $id "%P" $i] $script] + } else { + lappend newlist $i $script + } + } + if {$newlist ne {}} { + set commitinterest($prefix) $newlist + } else { + unset commitinterest($prefix) + } + } + return $scripts +} + +proc getcommitlines {fd inst view updating} { + global cmitlisted leftover + global commitidx commitdata vdatemode + global parents children curview hlview + global idpending ordertok + global varccommits varcid varctok vtokmod vfilelimit + + set stuff [read $fd 500000] + # git log doesn't terminate the last commit with a null... 
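+    # (The stream was opened with "git log -z --pretty=raw --parents ...", so
+    #  records arrive NUL-separated, roughly "commit <sha> <parents>\nauthor ...
+    #  \ncommitter ...\n\n    message\0".  A 500000-byte read can stop in the
+    #  middle of a record; the partial tail is kept in leftover($inst) and is
+    #  completed by the next read.)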
+ if {$stuff == {} && $leftover($inst) ne {} && [eof $fd]} { + set stuff "\0" + } + if {$stuff == {}} { + if {![eof $fd]} { + return 1 + } + global commfd viewcomplete viewactive viewname + global viewinstances + unset commfd($inst) + set i [lsearch -exact $viewinstances($view) $inst] + if {$i >= 0} { + set viewinstances($view) [lreplace $viewinstances($view) $i $i] + } + # set it blocking so we wait for the process to terminate + fconfigure $fd -blocking 1 + if {[catch {close $fd} err]} { + set fv {} + if {$view != $curview} { + set fv " for the \"$viewname($view)\" view" + } + if {[string range $err 0 4] == "usage"} { + set err "Gitk: error reading commits$fv:\ + bad arguments to git log." + if {$viewname($view) eq "Command line"} { + append err \ + " (Note: arguments to gitk are passed to git log\ + to allow selection of commits to be displayed.)" + } + } else { + set err "Error reading commits$fv: $err" + } + error_popup $err + } + if {[incr viewactive($view) -1] <= 0} { + set viewcomplete($view) 1 + # Check if we have seen any ids listed as parents that haven't + # appeared in the list + closevarcs $view + notbusy $view + } + if {$view == $curview} { + run chewcommits + } + return 0 + } + set start 0 + set gotsome 0 + set scripts {} + while 1 { + set i [string first "\0" $stuff $start] + if {$i < 0} { + append leftover($inst) [string range $stuff $start end] + break + } + if {$start == 0} { + set cmit $leftover($inst) + append cmit [string range $stuff 0 [expr {$i - 1}]] + set leftover($inst) {} + } else { + set cmit [string range $stuff $start [expr {$i - 1}]] + } + set start [expr {$i + 1}] + set j [string first "\n" $cmit] + set ok 0 + set listed 1 + if {$j >= 0 && [string match "commit *" $cmit]} { + set ids [string range $cmit 7 [expr {$j - 1}]] + if {[string match {[-^<>]*} $ids]} { + switch -- [string index $ids 0] { + "-" {set listed 0} + "^" {set listed 2} + "<" {set listed 3} + ">" {set listed 4} + } + set ids [string range $ids 1 end] + } + set ok 1 + foreach id $ids { + if {[string length $id] != 40} { + set ok 0 + break + } + } + } + if {!$ok} { + set shortcmit $cmit + if {[string length $shortcmit] > 80} { + set shortcmit "[string range $shortcmit 0 80]..." + } + error_popup "[mc "Can't parse git log output:"] {$shortcmit}" + exit 1 + } + set id [lindex $ids 0] + set vid $view,$id + + if {!$listed && $updating && ![info exists varcid($vid)] && + $vfilelimit($view) ne {}} { + # git log doesn't rewrite parents for unlisted commits + # when doing path limiting, so work around that here + # by working out the rewritten parent with git rev-list + # and if we already know about it, using the rewritten + # parent as a substitute parent for $id's children. 
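+            # e.g. "git rev-list --first-parent --max-count=1 <id> -- <paths>"
+            # prints the newest commit reachable from <id> along first parents
+            # that touches <paths> (possibly <id> itself), or nothing at all;
+            # hence the catch and the "$rwid ne {}" test below.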
+ if {![catch { + set rwid [exec git rev-list --first-parent --max-count=1 \ + $id -- $vfilelimit($view)] + }]} { + if {$rwid ne {} && [info exists varcid($view,$rwid)]} { + # use $rwid in place of $id + rewrite_commit $view $id $rwid + continue + } + } + } + + set a 0 + if {[info exists varcid($vid)]} { + if {$cmitlisted($vid) || !$listed} continue + set a $varcid($vid) + } + if {$listed} { + set olds [lrange $ids 1 end] + } else { + set olds {} + } + set commitdata($id) [string range $cmit [expr {$j + 1}] end] + set cmitlisted($vid) $listed + set parents($vid) $olds + if {![info exists children($vid)]} { + set children($vid) {} + } elseif {$a == 0 && [llength $children($vid)] == 1} { + set k [lindex $children($vid) 0] + if {[llength $parents($view,$k)] == 1 && + (!$vdatemode($view) || + $varcid($view,$k) == [llength $varctok($view)] - 1)} { + set a $varcid($view,$k) + } + } + if {$a == 0} { + # new arc + set a [newvarc $view $id] + } + if {[string compare [lindex $varctok($view) $a] $vtokmod($view)] < 0} { + modify_arc $view $a + } + if {![info exists varcid($vid)]} { + set varcid($vid) $a + lappend varccommits($view,$a) $id + incr commitidx($view) + } + + set i 0 + foreach p $olds { + if {$i == 0 || [lsearch -exact $olds $p] >= $i} { + set vp $view,$p + if {[llength [lappend children($vp) $id]] > 1 && + [vtokcmp $view [lindex $children($vp) end-1] $id] > 0} { + set children($vp) [lsort -command [list vtokcmp $view] \ + $children($vp)] + catch {unset ordertok} + } + if {[info exists varcid($view,$p)]} { + fix_reversal $p $a $view + } + } + incr i + } + + set scripts [check_interest $id $scripts] + set gotsome 1 + } + if {$gotsome} { + global numcommits hlview + + if {$view == $curview} { + set numcommits $commitidx($view) + run chewcommits + } + if {[info exists hlview] && $view == $hlview} { + # we never actually get here... 
+ run vhighlightmore + } + foreach s $scripts { + eval $s + } + } + return 2 +} + +proc chewcommits {} { + global curview hlview viewcomplete + global pending_select + + layoutmore + if {$viewcomplete($curview)} { + global commitidx varctok + global numcommits startmsecs + + if {[info exists pending_select]} { + update + reset_pending_select {} + + if {[commitinview $pending_select $curview]} { + selectline [rowofcommit $pending_select] 1 + } else { + set row [first_real_row] + selectline $row 1 + } + } + if {$commitidx($curview) > 0} { + #set ms [expr {[clock clicks -milliseconds] - $startmsecs}] + #puts "overall $ms ms for $numcommits commits" + #puts "[llength $varctok($view)] arcs, $commitidx($view) commits" + } else { + show_status [mc "No commits selected"] + } + notbusy layout + } + return 0 +} + +proc do_readcommit {id} { + global tclencoding + + # Invoke git-log to handle automatic encoding conversion + set fd [open [concat | git log --no-color --pretty=raw -1 $id] r] + # Read the results using i18n.logoutputencoding + fconfigure $fd -translation lf -eofchar {} + if {$tclencoding != {}} { + fconfigure $fd -encoding $tclencoding + } + set contents [read $fd] + close $fd + # Remove the heading line + regsub {^commit [0-9a-f]+\n} $contents {} contents + + return $contents +} + +proc readcommit {id} { + if {[catch {set contents [do_readcommit $id]}]} return + parsecommit $id $contents 1 +} + +proc parsecommit {id contents listed} { + global commitinfo cdate + + set inhdr 1 + set comment {} + set headline {} + set auname {} + set audate {} + set comname {} + set comdate {} + set hdrend [string first "\n\n" $contents] + if {$hdrend < 0} { + # should never happen... + set hdrend [string length $contents] + } + set header [string range $contents 0 [expr {$hdrend - 1}]] + set comment [string range $contents [expr {$hdrend + 2}] end] + foreach line [split $header "\n"] { + set line [split $line " "] + set tag [lindex $line 0] + if {$tag == "author"} { + set audate [lindex $line end-1] + set auname [join [lrange $line 1 end-2] " "] + } elseif {$tag == "committer"} { + set comdate [lindex $line end-1] + set comname [join [lrange $line 1 end-2] " "] + } + } + set headline {} + # take the first non-blank line of the comment as the headline + set headline [string trimleft $comment] + set i [string first "\n" $headline] + if {$i >= 0} { + set headline [string range $headline 0 $i] + } + set headline [string trimright $headline] + set i [string first "\r" $headline] + if {$i >= 0} { + set headline [string trimright [string range $headline 0 $i]] + } + if {!$listed} { + # git log indents the comment by 4 spaces; + # if we got this via git cat-file, add the indentation + set newcomment {} + foreach line [split $comment "\n"] { + append newcomment " " + append newcomment $line + append newcomment "\n" + } + set comment $newcomment + } + if {$comdate != {}} { + set cdate($id) $comdate + } + set commitinfo($id) [list $headline $auname $audate \ + $comname $comdate $comment] +} + +proc getcommit {id} { + global commitdata commitinfo + + if {[info exists commitdata($id)]} { + parsecommit $id $commitdata($id) 1 + } else { + readcommit $id + if {![info exists commitinfo($id)]} { + set commitinfo($id) [list [mc "No commit information available"]] + } + } + return 1 +} + +# Expand an abbreviated commit ID to a list of full 40-char IDs that match +# and are present in the current view. +# This is fairly slow... 
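+# (It globs [array names varcid "$curview,$prefix*"], which has to walk every
+#  entry of the varcid array -- one per commit per view -- hence "fairly slow".)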
+proc longid {prefix} { + global varcid curview + + set ids {} + foreach match [array names varcid "$curview,$prefix*"] { + lappend ids [lindex [split $match ","] 1] + } + return $ids +} + +proc readrefs {} { + global tagids idtags headids idheads tagobjid + global otherrefids idotherrefs mainhead mainheadid + global selecthead selectheadid + global hideremotes + + foreach v {tagids idtags headids idheads otherrefids idotherrefs} { + catch {unset $v} + } + set refd [open [list | git show-ref -d] r] + while {[gets $refd line] >= 0} { + if {[string index $line 40] ne " "} continue + set id [string range $line 0 39] + set ref [string range $line 41 end] + if {![string match "refs/*" $ref]} continue + set name [string range $ref 5 end] + if {[string match "remotes/*" $name]} { + if {![string match "*/HEAD" $name] && !$hideremotes} { + set headids($name) $id + lappend idheads($id) $name + } + } elseif {[string match "heads/*" $name]} { + set name [string range $name 6 end] + set headids($name) $id + lappend idheads($id) $name + } elseif {[string match "tags/*" $name]} { + # this lets refs/tags/foo^{} overwrite refs/tags/foo, + # which is what we want since the former is the commit ID + set name [string range $name 5 end] + if {[string match "*^{}" $name]} { + set name [string range $name 0 end-3] + } else { + set tagobjid($name) $id + } + set tagids($name) $id + lappend idtags($id) $name + } else { + set otherrefids($name) $id + lappend idotherrefs($id) $name + } + } + catch {close $refd} + set mainhead {} + set mainheadid {} + catch { + set mainheadid [exec git rev-parse HEAD] + set thehead [exec git symbolic-ref HEAD] + if {[string match "refs/heads/*" $thehead]} { + set mainhead [string range $thehead 11 end] + } + } + set selectheadid {} + if {$selecthead ne {}} { + catch { + set selectheadid [exec git rev-parse --verify $selecthead] + } + } +} + +# skip over fake commits +proc first_real_row {} { + global nullid nullid2 numcommits + + for {set row 0} {$row < $numcommits} {incr row} { + set id [commitonrow $row] + if {$id ne $nullid && $id ne $nullid2} { + break + } + } + return $row +} + +# update things for a head moved to a child of its previous location +proc movehead {id name} { + global headids idheads + + removehead $headids($name) $name + set headids($name) $id + lappend idheads($id) $name +} + +# update things when a head has been removed +proc removehead {id name} { + global headids idheads + + if {$idheads($id) eq $name} { + unset idheads($id) + } else { + set i [lsearch -exact $idheads($id) $name] + if {$i >= 0} { + set idheads($id) [lreplace $idheads($id) $i $i] + } + } + unset headids($name) +} + +proc ttk_toplevel {w args} { + global use_ttk + eval [linsert $args 0 ::toplevel $w] + if {$use_ttk} { + place [ttk::frame $w._toplevel_background] -x 0 -y 0 -relwidth 1 -relheight 1 + } + return $w +} + +proc make_transient {window origin} { + global have_tk85 + + # In MacOS Tk 8.4 transient appears to work by setting + # overrideredirect, which is utterly useless, since the + # windows get no border, and are not even kept above + # the parent. + if {!$have_tk85 && [tk windowingsystem] eq {aqua}} return + + wm transient $window $origin + + # Windows fails to place transient windows normally, so + # schedule a callback to center them on the parent. 
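+    # (::tk::PlaceWindow $window widget $origin centers $window over $origin.)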
+ if {[tk windowingsystem] eq {win32}} { + after idle [list tk::PlaceWindow $window widget $origin] + } +} + +proc show_error {w top msg {mc mc}} { + global NS + if {![info exists NS]} {set NS ""} + if {[wm state $top] eq "withdrawn"} { wm deiconify $top } + message $w.m -text $msg -justify center -aspect 400 + pack $w.m -side top -fill x -padx 20 -pady 20 + ${NS}::button $w.ok -default active -text [$mc OK] -command "destroy $top" + pack $w.ok -side bottom -fill x + bind $top "grab $top; focus $top" + bind $top "destroy $top" + bind $top "destroy $top" + bind $top "destroy $top" + tkwait window $top +} + +proc error_popup {msg {owner .}} { + if {[tk windowingsystem] eq "win32"} { + tk_messageBox -icon error -type ok -title [wm title .] \ + -parent $owner -message $msg + } else { + set w .error + ttk_toplevel $w + make_transient $w $owner + show_error $w $w $msg + } +} + +proc confirm_popup {msg {owner .}} { + global confirm_ok NS + set confirm_ok 0 + set w .confirm + ttk_toplevel $w + make_transient $w $owner + message $w.m -text $msg -justify center -aspect 400 + pack $w.m -side top -fill x -padx 20 -pady 20 + ${NS}::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w" + pack $w.ok -side left -fill x + ${NS}::button $w.cancel -text [mc Cancel] -command "destroy $w" + pack $w.cancel -side right -fill x + bind $w "grab $w; focus $w" + bind $w "set confirm_ok 1; destroy $w" + bind $w "set confirm_ok 1; destroy $w" + bind $w "destroy $w" + tk::PlaceWindow $w widget $owner + tkwait window $w + return $confirm_ok +} + +proc setoptions {} { + if {[tk windowingsystem] ne "win32"} { + option add *Panedwindow.showHandle 1 startupFile + option add *Panedwindow.sashRelief raised startupFile + if {[tk windowingsystem] ne "aqua"} { + option add *Menu.font uifont startupFile + } + } else { + option add *Menu.TearOff 0 startupFile + } + option add *Button.font uifont startupFile + option add *Checkbutton.font uifont startupFile + option add *Radiobutton.font uifont startupFile + option add *Menubutton.font uifont startupFile + option add *Label.font uifont startupFile + option add *Message.font uifont startupFile + option add *Entry.font textfont startupFile + option add *Text.font textfont startupFile + option add *Labelframe.font uifont startupFile + option add *Spinbox.font textfont startupFile + option add *Listbox.font mainfont startupFile +} + +# Make a menu and submenus. +# m is the window name for the menu, items is the list of menu items to add. 
+# Each item is a list {mc label type description options...} +# mc is ignored; it's so we can put mc there to alert xgettext +# label is the string that appears in the menu +# type is cascade, command or radiobutton (should add checkbutton) +# description depends on type; it's the sublist for cascade, the +# command to invoke for command, or {variable value} for radiobutton +proc makemenu {m items} { + menu $m + if {[tk windowingsystem] eq {aqua}} { + set Meta1 Cmd + } else { + set Meta1 Ctrl + } + foreach i $items { + set name [mc [lindex $i 1]] + set type [lindex $i 2] + set thing [lindex $i 3] + set params [list $type] + if {$name ne {}} { + set u [string first "&" [string map {&& x} $name]] + lappend params -label [string map {&& & & {}} $name] + if {$u >= 0} { + lappend params -underline $u + } + } + switch -- $type { + "cascade" { + set submenu [string tolower [string map {& ""} [lindex $i 1]]] + lappend params -menu $m.$submenu + } + "command" { + lappend params -command $thing + } + "radiobutton" { + lappend params -variable [lindex $thing 0] \ + -value [lindex $thing 1] + } + } + set tail [lrange $i 4 end] + regsub -all {\yMeta1\y} $tail $Meta1 tail + eval $m add $params $tail + if {$type eq "cascade"} { + makemenu $m.$submenu $thing + } + } +} + +# translate string and remove ampersands +proc mca {str} { + return [string map {&& & & {}} [mc $str]] +} + +proc makedroplist {w varname args} { + global use_ttk + if {$use_ttk} { + set width 0 + foreach label $args { + set cx [string length $label] + if {$cx > $width} {set width $cx} + } + set gm [ttk::combobox $w -width $width -state readonly\ + -textvariable $varname -values $args] + } else { + set gm [eval [linsert $args 0 tk_optionMenu $w $varname]] + } + return $gm +} + +proc makewindow {} { + global canv canv2 canv3 linespc charspc ctext cflist cscroll + global tabstop + global findtype findtypemenu findloc findstring fstring geometry + global entries sha1entry sha1string sha1but + global diffcontextstring diffcontext + global ignorespace + global maincursor textcursor curtextcursor + global rowctxmenu fakerowmenu mergemax wrapcomment + global highlight_files gdttype + global searchstring sstring + global bgcolor fgcolor bglist fglist diffcolors selectbgcolor + global headctxmenu progresscanv progressitem progresscoords statusw + global fprogitem fprogcoord lastprogupdate progupdatepending + global rprogitem rprogcoord rownumsel numcommits + global have_tk85 use_ttk NS + global git_version + global worddiff + + # The "mc" arguments here are purely so that xgettext + # sees the following string as needing to be translated + set file { + mc "File" cascade { + {mc "Update" command updatecommits -accelerator F5} + {mc "Reload" command reloadcommits -accelerator Meta1-F5} + {mc "Reread references" command rereadrefs} + {mc "List references" command showrefs -accelerator F2} + {xx "" separator} + {mc "Start git gui" command {exec git gui &}} + {xx "" separator} + {mc "Quit" command doquit -accelerator Meta1-Q} + }} + set edit { + mc "Edit" cascade { + {mc "Preferences" command doprefs} + }} + set view { + mc "View" cascade { + {mc "New view..." command {newview 0} -accelerator Shift-F4} + {mc "Edit view..." 
command editview -state disabled -accelerator F4} + {mc "Delete view" command delview -state disabled} + {xx "" separator} + {mc "All files" radiobutton {selectedview 0} -command {showview 0}} + }} + if {[tk windowingsystem] ne "aqua"} { + set help { + mc "Help" cascade { + {mc "About gitk" command about} + {mc "Key bindings" command keys} + }} + set bar [list $file $edit $view $help] + } else { + proc ::tk::mac::ShowPreferences {} {doprefs} + proc ::tk::mac::Quit {} {doquit} + lset file end [lreplace [lindex $file end] end-1 end] + set apple { + xx "Apple" cascade { + {mc "About gitk" command about} + {xx "" separator} + }} + set help { + mc "Help" cascade { + {mc "Key bindings" command keys} + }} + set bar [list $apple $file $view $help] + } + makemenu .bar $bar + . configure -menu .bar + + if {$use_ttk} { + # cover the non-themed toplevel with a themed frame. + place [ttk::frame ._main_background] -x 0 -y 0 -relwidth 1 -relheight 1 + } + + # the gui has upper and lower half, parts of a paned window. + ${NS}::panedwindow .ctop -orient vertical + + # possibly use assumed geometry + if {![info exists geometry(pwsash0)]} { + set geometry(topheight) [expr {15 * $linespc}] + set geometry(topwidth) [expr {80 * $charspc}] + set geometry(botheight) [expr {15 * $linespc}] + set geometry(botwidth) [expr {50 * $charspc}] + set geometry(pwsash0) [list [expr {40 * $charspc}] 2] + set geometry(pwsash1) [list [expr {60 * $charspc}] 2] + } + + # the upper half will have a paned window, a scroll bar to the right, and some stuff below + ${NS}::frame .tf -height $geometry(topheight) -width $geometry(topwidth) + ${NS}::frame .tf.histframe + ${NS}::panedwindow .tf.histframe.pwclist -orient horizontal + if {!$use_ttk} { + .tf.histframe.pwclist configure -sashpad 0 -handlesize 4 + } + + # create three canvases + set cscroll .tf.histframe.csb + set canv .tf.histframe.pwclist.canv + canvas $canv \ + -selectbackground $selectbgcolor \ + -background $bgcolor -bd 0 \ + -yscrollincr $linespc -yscrollcommand "scrollcanv $cscroll" + .tf.histframe.pwclist add $canv + set canv2 .tf.histframe.pwclist.canv2 + canvas $canv2 \ + -selectbackground $selectbgcolor \ + -background $bgcolor -bd 0 -yscrollincr $linespc + .tf.histframe.pwclist add $canv2 + set canv3 .tf.histframe.pwclist.canv3 + canvas $canv3 \ + -selectbackground $selectbgcolor \ + -background $bgcolor -bd 0 -yscrollincr $linespc + .tf.histframe.pwclist add $canv3 + if {$use_ttk} { + bind .tf.histframe.pwclist { + bind %W {} + .tf.histframe.pwclist sashpos 1 [lindex $::geometry(pwsash1) 0] + .tf.histframe.pwclist sashpos 0 [lindex $::geometry(pwsash0) 0] + } + } else { + eval .tf.histframe.pwclist sash place 0 $geometry(pwsash0) + eval .tf.histframe.pwclist sash place 1 $geometry(pwsash1) + } + + # a scroll bar to rule them + ${NS}::scrollbar $cscroll -command {allcanvs yview} + if {!$use_ttk} {$cscroll configure -highlightthickness 0} + pack $cscroll -side right -fill y + bind .tf.histframe.pwclist {resizeclistpanes %W %w} + lappend bglist $canv $canv2 $canv3 + pack .tf.histframe.pwclist -fill both -expand 1 -side left + + # we have two button bars at bottom of top frame. 
Bar 1 + ${NS}::frame .tf.bar + ${NS}::frame .tf.lbar -height 15 + + set sha1entry .tf.bar.sha1 + set entries $sha1entry + set sha1but .tf.bar.sha1label + button $sha1but -text "[mc "SHA1 ID:"] " -state disabled -relief flat \ + -command gotocommit -width 8 + $sha1but conf -disabledforeground [$sha1but cget -foreground] + pack .tf.bar.sha1label -side left + ${NS}::entry $sha1entry -width 40 -font textfont -textvariable sha1string + trace add variable sha1string write sha1change + pack $sha1entry -side left -pady 2 + + image create bitmap bm-left -data { + #define left_width 16 + #define left_height 16 + static unsigned char left_bits[] = { + 0x00, 0x00, 0xc0, 0x01, 0xe0, 0x00, 0x70, 0x00, 0x38, 0x00, 0x1c, 0x00, + 0x0e, 0x00, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x0e, 0x00, 0x1c, 0x00, + 0x38, 0x00, 0x70, 0x00, 0xe0, 0x00, 0xc0, 0x01}; + } + image create bitmap bm-right -data { + #define right_width 16 + #define right_height 16 + static unsigned char right_bits[] = { + 0x00, 0x00, 0xc0, 0x01, 0x80, 0x03, 0x00, 0x07, 0x00, 0x0e, 0x00, 0x1c, + 0x00, 0x38, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x00, 0x38, 0x00, 0x1c, + 0x00, 0x0e, 0x00, 0x07, 0x80, 0x03, 0xc0, 0x01}; + } + ${NS}::button .tf.bar.leftbut -image bm-left -command goback \ + -state disabled -width 26 + pack .tf.bar.leftbut -side left -fill y + ${NS}::button .tf.bar.rightbut -image bm-right -command goforw \ + -state disabled -width 26 + pack .tf.bar.rightbut -side left -fill y + + ${NS}::label .tf.bar.rowlabel -text [mc "Row"] + set rownumsel {} + ${NS}::label .tf.bar.rownum -width 7 -textvariable rownumsel \ + -relief sunken -anchor e + ${NS}::label .tf.bar.rowlabel2 -text "/" + ${NS}::label .tf.bar.numcommits -width 7 -textvariable numcommits \ + -relief sunken -anchor e + pack .tf.bar.rowlabel .tf.bar.rownum .tf.bar.rowlabel2 .tf.bar.numcommits \ + -side left + if {!$use_ttk} { + foreach w {rownum numcommits} {.tf.bar.$w configure -font textfont} + } + global selectedline + trace add variable selectedline write selectedline_change + + # Status label and progress bar + set statusw .tf.bar.status + ${NS}::label $statusw -width 15 -relief sunken + pack $statusw -side left -padx 5 + if {$use_ttk} { + set progresscanv [ttk::progressbar .tf.bar.progress] + } else { + set h [expr {[font metrics uifont -linespace] + 2}] + set progresscanv .tf.bar.progress + canvas $progresscanv -relief sunken -height $h -borderwidth 2 + set progressitem [$progresscanv create rect -1 0 0 $h -fill green] + set fprogitem [$progresscanv create rect -1 0 0 $h -fill yellow] + set rprogitem [$progresscanv create rect -1 0 0 $h -fill red] + } + pack $progresscanv -side right -expand 1 -fill x -padx {0 2} + set progresscoords {0 0} + set fprogcoord 0 + set rprogcoord 0 + bind $progresscanv adjustprogress + set lastprogupdate [clock clicks -milliseconds] + set progupdatepending 0 + + # build up the bottom bar of upper window + ${NS}::label .tf.lbar.flabel -text "[mc "Find"] " + ${NS}::button .tf.lbar.fnext -text [mc "next"] -command {dofind 1 1} + ${NS}::button .tf.lbar.fprev -text [mc "prev"] -command {dofind -1 1} + ${NS}::label .tf.lbar.flab2 -text " [mc "commit"] " + pack .tf.lbar.flabel .tf.lbar.fnext .tf.lbar.fprev .tf.lbar.flab2 \ + -side left -fill y + set gdttype [mc "containing:"] + set gm [makedroplist .tf.lbar.gdttype gdttype \ + [mc "containing:"] \ + [mc "touching paths:"] \ + [mc "adding/removing string:"]] + trace add variable gdttype write gdttype_change + pack .tf.lbar.gdttype -side left -fill y + + set findstring {} + set fstring 
.tf.lbar.findstring + lappend entries $fstring + ${NS}::entry $fstring -width 30 -textvariable findstring + trace add variable findstring write find_change + set findtype [mc "Exact"] + set findtypemenu [makedroplist .tf.lbar.findtype \ + findtype [mc "Exact"] [mc "IgnCase"] [mc "Regexp"]] + trace add variable findtype write findcom_change + set findloc [mc "All fields"] + makedroplist .tf.lbar.findloc findloc [mc "All fields"] [mc "Headline"] \ + [mc "Comments"] [mc "Author"] [mc "Committer"] + trace add variable findloc write find_change + pack .tf.lbar.findloc -side right + pack .tf.lbar.findtype -side right + pack $fstring -side left -expand 1 -fill x + + # Finish putting the upper half of the viewer together + pack .tf.lbar -in .tf -side bottom -fill x + pack .tf.bar -in .tf -side bottom -fill x + pack .tf.histframe -fill both -side top -expand 1 + .ctop add .tf + if {!$use_ttk} { + .ctop paneconfigure .tf -height $geometry(topheight) + .ctop paneconfigure .tf -width $geometry(topwidth) + } + + # now build up the bottom + ${NS}::panedwindow .pwbottom -orient horizontal + + # lower left, a text box over search bar, scroll bar to the right + # if we know window height, then that will set the lower text height, otherwise + # we set lower text height which will drive window height + if {[info exists geometry(main)]} { + ${NS}::frame .bleft -width $geometry(botwidth) + } else { + ${NS}::frame .bleft -width $geometry(botwidth) -height $geometry(botheight) + } + ${NS}::frame .bleft.top + ${NS}::frame .bleft.mid + ${NS}::frame .bleft.bottom + + ${NS}::button .bleft.top.search -text [mc "Search"] -command dosearch + pack .bleft.top.search -side left -padx 5 + set sstring .bleft.top.sstring + set searchstring "" + ${NS}::entry $sstring -width 20 -textvariable searchstring + lappend entries $sstring + trace add variable searchstring write incrsearch + pack $sstring -side left -expand 1 -fill x + ${NS}::radiobutton .bleft.mid.diff -text [mc "Diff"] \ + -command changediffdisp -variable diffelide -value {0 0} + ${NS}::radiobutton .bleft.mid.old -text [mc "Old version"] \ + -command changediffdisp -variable diffelide -value {0 1} + ${NS}::radiobutton .bleft.mid.new -text [mc "New version"] \ + -command changediffdisp -variable diffelide -value {1 0} + ${NS}::label .bleft.mid.labeldiffcontext -text " [mc "Lines of context"]: " + pack .bleft.mid.diff .bleft.mid.old .bleft.mid.new -side left + spinbox .bleft.mid.diffcontext -width 5 \ + -from 0 -increment 1 -to 10000000 \ + -validate all -validatecommand "diffcontextvalidate %P" \ + -textvariable diffcontextstring + .bleft.mid.diffcontext set $diffcontext + trace add variable diffcontextstring write diffcontextchange + lappend entries .bleft.mid.diffcontext + pack .bleft.mid.labeldiffcontext .bleft.mid.diffcontext -side left + ${NS}::checkbutton .bleft.mid.ignspace -text [mc "Ignore space change"] \ + -command changeignorespace -variable ignorespace + pack .bleft.mid.ignspace -side left -padx 5 + + set worddiff [mc "Line diff"] + if {[package vcompare $git_version "1.7.2"] >= 0} { + makedroplist .bleft.mid.worddiff worddiff [mc "Line diff"] \ + [mc "Markup words"] [mc "Color words"] + trace add variable worddiff write changeworddiff + pack .bleft.mid.worddiff -side left -padx 5 + } + + set ctext .bleft.bottom.ctext + text $ctext -background $bgcolor -foreground $fgcolor \ + -state disabled -font textfont \ + -yscrollcommand scrolltext -wrap none \ + -xscrollcommand ".bleft.bottom.sbhorizontal set" + if {$have_tk85} { + $ctext conf -tabstyle 
wordprocessor + } + ${NS}::scrollbar .bleft.bottom.sb -command "$ctext yview" + ${NS}::scrollbar .bleft.bottom.sbhorizontal -command "$ctext xview" -orient h + pack .bleft.top -side top -fill x + pack .bleft.mid -side top -fill x + grid $ctext .bleft.bottom.sb -sticky nsew + grid .bleft.bottom.sbhorizontal -sticky ew + grid columnconfigure .bleft.bottom 0 -weight 1 + grid rowconfigure .bleft.bottom 0 -weight 1 + grid rowconfigure .bleft.bottom 1 -weight 0 + pack .bleft.bottom -side top -fill both -expand 1 + lappend bglist $ctext + lappend fglist $ctext + + $ctext tag conf comment -wrap $wrapcomment + $ctext tag conf filesep -font textfontbold -back "#aaaaaa" + $ctext tag conf hunksep -fore [lindex $diffcolors 2] + $ctext tag conf d0 -fore [lindex $diffcolors 0] + $ctext tag conf dresult -fore [lindex $diffcolors 1] + $ctext tag conf m0 -fore red + $ctext tag conf m1 -fore blue + $ctext tag conf m2 -fore green + $ctext tag conf m3 -fore purple + $ctext tag conf m4 -fore brown + $ctext tag conf m5 -fore "#009090" + $ctext tag conf m6 -fore magenta + $ctext tag conf m7 -fore "#808000" + $ctext tag conf m8 -fore "#009000" + $ctext tag conf m9 -fore "#ff0080" + $ctext tag conf m10 -fore cyan + $ctext tag conf m11 -fore "#b07070" + $ctext tag conf m12 -fore "#70b0f0" + $ctext tag conf m13 -fore "#70f0b0" + $ctext tag conf m14 -fore "#f0b070" + $ctext tag conf m15 -fore "#ff70b0" + $ctext tag conf mmax -fore darkgrey + set mergemax 16 + $ctext tag conf mresult -font textfontbold + $ctext tag conf msep -font textfontbold + $ctext tag conf found -back yellow + + .pwbottom add .bleft + if {!$use_ttk} { + .pwbottom paneconfigure .bleft -width $geometry(botwidth) + } + + # lower right + ${NS}::frame .bright + ${NS}::frame .bright.mode + ${NS}::radiobutton .bright.mode.patch -text [mc "Patch"] \ + -command reselectline -variable cmitmode -value "patch" + ${NS}::radiobutton .bright.mode.tree -text [mc "Tree"] \ + -command reselectline -variable cmitmode -value "tree" + grid .bright.mode.patch .bright.mode.tree -sticky ew + pack .bright.mode -side top -fill x + set cflist .bright.cfiles + set indent [font measure mainfont "nn"] + text $cflist \ + -selectbackground $selectbgcolor \ + -background $bgcolor -foreground $fgcolor \ + -font mainfont \ + -tabs [list $indent [expr {2 * $indent}]] \ + -yscrollcommand ".bright.sb set" \ + -cursor [. cget -cursor] \ + -spacing1 1 -spacing3 1 + lappend bglist $cflist + lappend fglist $cflist + ${NS}::scrollbar .bright.sb -command "$cflist yview" + pack .bright.sb -side right -fill y + pack $cflist -side left -fill both -expand 1 + $cflist tag configure highlight \ + -background [$cflist cget -selectbackground] + $cflist tag configure bold -font mainfontbold + + .pwbottom add .bright + .ctop add .pwbottom + + # restore window width & height if known + if {[info exists geometry(main)]} { + if {[scan $geometry(main) "%dx%d" w h] >= 2} { + if {$w > [winfo screenwidth .]} { + set w [winfo screenwidth .] + } + if {$h > [winfo screenheight .]} { + set h [winfo screenheight .] + } + wm geometry . "${w}x$h" + } + } + + if {[info exists geometry(state)] && $geometry(state) eq "zoomed"} { + wm state . 
$geometry(state) + } + + if {[tk windowingsystem] eq {aqua}} { + set M1B M1 + set ::BM "3" + } else { + set M1B Control + set ::BM "2" + } + + if {$use_ttk} { + bind .ctop { + bind %W {} + %W sashpos 0 $::geometry(topheight) + } + bind .pwbottom { + bind %W {} + %W sashpos 0 $::geometry(botwidth) + } + } + + bind .pwbottom {resizecdetpanes %W %w} + pack .ctop -fill both -expand 1 + bindall <1> {selcanvline %W %x %y} + #bindall {selcanvline %W %x %y} + if {[tk windowingsystem] == "win32"} { + bind . { windows_mousewheel_redirector %W %X %Y %D } + bind $ctext { windows_mousewheel_redirector %W %X %Y %D ; break } + } else { + bindall "allcanvs yview scroll -5 units" + bindall "allcanvs yview scroll 5 units" + if {[tk windowingsystem] eq "aqua"} { + bindall { + set delta [expr {- (%D)}] + allcanvs yview scroll $delta units + } + bindall { + set delta [expr {- (%D)}] + $canv xview scroll $delta units + } + } + } + bindall <$::BM> "canvscan mark %W %x %y" + bindall "canvscan dragto %W %x %y" + bind all <$M1B-Key-w> {destroy [winfo toplevel %W]} + bind . <$M1B-Key-w> doquit + bindkey selfirstline + bindkey sellastline + bind . "selnextline -1" + bind . "selnextline 1" + bind . "dofind -1 0" + bind . "dofind 1 0" + bindkey "goforw" + bindkey "goback" + bind . "selnextpage -1" + bind . "selnextpage 1" + bind . <$M1B-Home> "allcanvs yview moveto 0.0" + bind . <$M1B-End> "allcanvs yview moveto 1.0" + bind . <$M1B-Key-Up> "allcanvs yview scroll -1 units" + bind . <$M1B-Key-Down> "allcanvs yview scroll 1 units" + bind . <$M1B-Key-Prior> "allcanvs yview scroll -1 pages" + bind . <$M1B-Key-Next> "allcanvs yview scroll 1 pages" + bindkey "$ctext yview scroll -1 pages" + bindkey "$ctext yview scroll -1 pages" + bindkey "$ctext yview scroll 1 pages" + bindkey p "selnextline -1" + bindkey n "selnextline 1" + bindkey z "goback" + bindkey x "goforw" + bindkey i "selnextline -1" + bindkey k "selnextline 1" + bindkey j "goback" + bindkey l "goforw" + bindkey b prevfile + bindkey d "$ctext yview scroll 18 units" + bindkey u "$ctext yview scroll -18 units" + bindkey / {focus $fstring} + bindkey {focus $fstring} + bindkey {dofind 1 1} + bindkey ? {dofind -1 1} + bindkey f nextfile + bind . updatecommits + bind . <$M1B-F5> reloadcommits + bind . showrefs + bind . {newview 0} + catch { bind . {newview 0} } + bind . edit_or_newview + bind . <$M1B-q> doquit + bind . <$M1B-f> {dofind 1 1} + bind . <$M1B-g> {dofind 1 0} + bind . <$M1B-r> dosearchback + bind . <$M1B-s> dosearch + bind . <$M1B-equal> {incrfont 1} + bind . <$M1B-plus> {incrfont 1} + bind . <$M1B-KP_Add> {incrfont 1} + bind . <$M1B-minus> {incrfont -1} + bind . <$M1B-KP_Subtract> {incrfont -1} + wm protocol . WM_DELETE_WINDOW doquit + bind . {stop_backends} + bind . "click %W" + bind $fstring {dofind 1 1} + bind $sha1entry {gotocommit; break} + bind $sha1entry <> clearsha1 + bind $cflist <1> {sel_flist %W %x %y; break} + bind $cflist {sel_flist %W %x %y; break} + bind $cflist {treeclick %W %x %y} + global ctxbut + bind $cflist $ctxbut {pop_flist_menu %W %X %Y %x %y} + bind $ctext $ctxbut {pop_diff_menu %W %X %Y %x %y} + bind $ctext {focus %W} + + set maincursor [. 
cget -cursor] + set textcursor [$ctext cget -cursor] + set curtextcursor $textcursor + + set rowctxmenu .rowctxmenu + makemenu $rowctxmenu { + {mc "Diff this -> selected" command {diffvssel 0}} + {mc "Diff selected -> this" command {diffvssel 1}} + {mc "Make patch" command mkpatch} + {mc "Create tag" command mktag} + {mc "Write commit to file" command writecommit} + {mc "Create new branch" command mkbranch} + {mc "Cherry-pick this commit" command cherrypick} + {mc "Reset HEAD branch to here" command resethead} + {mc "Mark this commit" command markhere} + {mc "Return to mark" command gotomark} + {mc "Find descendant of this and mark" command find_common_desc} + {mc "Compare with marked commit" command compare_commits} + } + $rowctxmenu configure -tearoff 0 + + set fakerowmenu .fakerowmenu + makemenu $fakerowmenu { + {mc "Diff this -> selected" command {diffvssel 0}} + {mc "Diff selected -> this" command {diffvssel 1}} + {mc "Make patch" command mkpatch} + } + $fakerowmenu configure -tearoff 0 + + set headctxmenu .headctxmenu + makemenu $headctxmenu { + {mc "Check out this branch" command cobranch} + {mc "Remove this branch" command rmbranch} + } + $headctxmenu configure -tearoff 0 + + global flist_menu + set flist_menu .flistctxmenu + makemenu $flist_menu { + {mc "Highlight this too" command {flist_hl 0}} + {mc "Highlight this only" command {flist_hl 1}} + {mc "External diff" command {external_diff}} + {mc "Blame parent commit" command {external_blame 1}} + } + $flist_menu configure -tearoff 0 + + global diff_menu + set diff_menu .diffctxmenu + makemenu $diff_menu { + {mc "Show origin of this line" command show_line_source} + {mc "Run git gui blame on this line" command {external_blame_diff}} + } + $diff_menu configure -tearoff 0 +} + +# Windows sends all mouse wheel events to the current focused window, not +# the one where the mouse hovers, so bind those events here and redirect +# to the correct window +proc windows_mousewheel_redirector {W X Y D} { + global canv canv2 canv3 + set w [winfo containing -displayof $W $X $Y] + if {$w ne ""} { + set u [expr {$D < 0 ? 5 : -5}] + if {$w == $canv || $w == $canv2 || $w == $canv3} { + allcanvs yview scroll $u units + } else { + catch { + $w yview scroll $u units + } + } + } +} + +# Update row number label when selectedline changes +proc selectedline_change {n1 n2 op} { + global selectedline rownumsel + + if {$selectedline eq {}} { + set rownumsel {} + } else { + set rownumsel [expr {$selectedline + 1}] + } +} + +# mouse-2 makes all windows scan vertically, but only the one +# the cursor is in scans horizontally +proc canvscan {op w x y} { + global canv canv2 canv3 + foreach c [list $canv $canv2 $canv3] { + if {$c == $w} { + $c scan $op $x $y + } else { + $c scan $op 0 $y + } + } +} + +proc scrollcanv {cscroll f0 f1} { + $cscroll set $f0 $f1 + drawvisible + flushhighlights +} + +# when we make a key binding for the toplevel, make sure +# it doesn't get triggered when that key is pressed in the +# find string entry widget. +proc bindkey {ev script} { + global entries + bind . $ev $script + set escript [bind Entry $ev] + if {$escript == {}} { + set escript [bind Entry ] + } + foreach e $entries { + bind $e $ev "$escript; break" + } +} + +# set the focus back to the toplevel for any click outside +# the entry widgets +proc click {w} { + global ctext entries + foreach e [concat $entries $ctext] { + if {$w == $e} return + } + focus . 
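+    # (a click on $ctext or one of $entries returned early above, keeping its focus)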
+} + +# Adjust the progress bar for a change in requested extent or canvas size +proc adjustprogress {} { + global progresscanv progressitem progresscoords + global fprogitem fprogcoord lastprogupdate progupdatepending + global rprogitem rprogcoord use_ttk + + if {$use_ttk} { + $progresscanv configure -value [expr {int($fprogcoord * 100)}] + return + } + + set w [expr {[winfo width $progresscanv] - 4}] + set x0 [expr {$w * [lindex $progresscoords 0]}] + set x1 [expr {$w * [lindex $progresscoords 1]}] + set h [winfo height $progresscanv] + $progresscanv coords $progressitem $x0 0 $x1 $h + $progresscanv coords $fprogitem 0 0 [expr {$w * $fprogcoord}] $h + $progresscanv coords $rprogitem 0 0 [expr {$w * $rprogcoord}] $h + set now [clock clicks -milliseconds] + if {$now >= $lastprogupdate + 100} { + set progupdatepending 0 + update + } elseif {!$progupdatepending} { + set progupdatepending 1 + after [expr {$lastprogupdate + 100 - $now}] doprogupdate + } +} + +proc doprogupdate {} { + global lastprogupdate progupdatepending + + if {$progupdatepending} { + set progupdatepending 0 + set lastprogupdate [clock clicks -milliseconds] + update + } +} + +proc savestuff {w} { + global canv canv2 canv3 mainfont textfont uifont tabstop + global stuffsaved findmergefiles maxgraphpct + global maxwidth showneartags showlocalchanges + global viewname viewfiles viewargs viewargscmd viewperm nextviewnum + global cmitmode wrapcomment datetimeformat limitdiffs + global colors uicolor bgcolor fgcolor diffcolors diffcontext selectbgcolor + global autoselect autosellen extdifftool perfile_attrs markbgcolor use_ttk + global hideremotes want_ttk + + if {$stuffsaved} return + if {![winfo viewable .]} return + catch { + if {[file exists ~/.gitk-new]} {file delete -force ~/.gitk-new} + set f [open "~/.gitk-new" w] + if {$::tcl_platform(platform) eq {windows}} { + file attributes "~/.gitk-new" -hidden true + } + puts $f [list set mainfont $mainfont] + puts $f [list set textfont $textfont] + puts $f [list set uifont $uifont] + puts $f [list set tabstop $tabstop] + puts $f [list set findmergefiles $findmergefiles] + puts $f [list set maxgraphpct $maxgraphpct] + puts $f [list set maxwidth $maxwidth] + puts $f [list set cmitmode $cmitmode] + puts $f [list set wrapcomment $wrapcomment] + puts $f [list set autoselect $autoselect] + puts $f [list set autosellen $autosellen] + puts $f [list set showneartags $showneartags] + puts $f [list set hideremotes $hideremotes] + puts $f [list set showlocalchanges $showlocalchanges] + puts $f [list set datetimeformat $datetimeformat] + puts $f [list set limitdiffs $limitdiffs] + puts $f [list set uicolor $uicolor] + puts $f [list set want_ttk $want_ttk] + puts $f [list set bgcolor $bgcolor] + puts $f [list set fgcolor $fgcolor] + puts $f [list set colors $colors] + puts $f [list set diffcolors $diffcolors] + puts $f [list set markbgcolor $markbgcolor] + puts $f [list set diffcontext $diffcontext] + puts $f [list set selectbgcolor $selectbgcolor] + puts $f [list set extdifftool $extdifftool] + puts $f [list set perfile_attrs $perfile_attrs] + + puts $f "set geometry(main) [wm geometry .]" + puts $f "set geometry(state) [wm state .]" + puts $f "set geometry(topwidth) [winfo width .tf]" + puts $f "set geometry(topheight) [winfo height .tf]" + if {$use_ttk} { + puts $f "set geometry(pwsash0) \"[.tf.histframe.pwclist sashpos 0] 1\"" + puts $f "set geometry(pwsash1) \"[.tf.histframe.pwclist sashpos 1] 1\"" + } else { + puts $f "set geometry(pwsash0) \"[.tf.histframe.pwclist sash coord 0]\"" + 
puts $f "set geometry(pwsash1) \"[.tf.histframe.pwclist sash coord 1]\""
+        }
+        puts $f "set geometry(botwidth) [winfo width .bleft]"
+        puts $f "set geometry(botheight) [winfo height .bleft]"
+
+        puts -nonewline $f "set permviews {"
+        for {set v 0} {$v < $nextviewnum} {incr v} {
+            if {$viewperm($v)} {
+                puts $f "{[list $viewname($v) $viewfiles($v) $viewargs($v) $viewargscmd($v)]}"
+            }
+        }
+        puts $f "}"
+        close $f
+        file rename -force "~/.gitk-new" "~/.gitk"
+    }
+    set stuffsaved 1
+}
+
+proc resizeclistpanes {win w} {
+    global oldwidth use_ttk
+    if {[info exists oldwidth($win)]} {
+        if {$use_ttk} {
+            set s0 [$win sashpos 0]
+            set s1 [$win sashpos 1]
+        } else {
+            set s0 [$win sash coord 0]
+            set s1 [$win sash coord 1]
+        }
+        if {$w < 60} {
+            set sash0 [expr {int($w/2 - 2)}]
+            set sash1 [expr {int($w*5/6 - 2)}]
+        } else {
+            set factor [expr {1.0 * $w / $oldwidth($win)}]
+            set sash0 [expr {int($factor * [lindex $s0 0])}]
+            set sash1 [expr {int($factor * [lindex $s1 0])}]
+            if {$sash0 < 30} {
+                set sash0 30
+            }
+            if {$sash1 < $sash0 + 20} {
+                set sash1 [expr {$sash0 + 20}]
+            }
+            if {$sash1 > $w - 10} {
+                set sash1 [expr {$w - 10}]
+                if {$sash0 > $sash1 - 20} {
+                    set sash0 [expr {$sash1 - 20}]
+                }
+            }
+        }
+        if {$use_ttk} {
+            $win sashpos 0 $sash0
+            $win sashpos 1 $sash1
+        } else {
+            $win sash place 0 $sash0 [lindex $s0 1]
+            $win sash place 1 $sash1 [lindex $s1 1]
+        }
+    }
+    set oldwidth($win) $w
+}
+
+proc resizecdetpanes {win w} {
+    global oldwidth use_ttk
+    if {[info exists oldwidth($win)]} {
+        if {$use_ttk} {
+            set s0 [$win sashpos 0]
+        } else {
+            set s0 [$win sash coord 0]
+        }
+        if {$w < 60} {
+            set sash0 [expr {int($w*3/4 - 2)}]
+        } else {
+            set factor [expr {1.0 * $w / $oldwidth($win)}]
+            set sash0 [expr {int($factor * [lindex $s0 0])}]
+            if {$sash0 < 45} {
+                set sash0 45
+            }
+            if {$sash0 > $w - 15} {
+                set sash0 [expr {$w - 15}]
+            }
+        }
+        if {$use_ttk} {
+            $win sashpos 0 $sash0
+        } else {
+            $win sash place 0 $sash0 [lindex $s0 1]
+        }
+    }
+    set oldwidth($win) $w
+}
+
+proc allcanvs args {
+    global canv canv2 canv3
+    eval $canv $args
+    eval $canv2 $args
+    eval $canv3 $args
+}
+
+proc bindall {event action} {
+    global canv canv2 canv3
+    bind $canv $event $action
+    bind $canv2 $event $action
+    bind $canv3 $event $action
+}
+
+proc about {} {
+    global uifont NS
+    set w .about
+    if {[winfo exists $w]} {
+        raise $w
+        return
+    }
+    ttk_toplevel $w
+    wm title $w [mc "About gitk"]
+    make_transient $w .
+    message $w.m -text [mc "
+Gitk - a commit viewer for git
+
+Copyright \u00a9 2005-2010 Paul Mackerras
+
+Use and redistribute under the terms of the GNU General Public License"] \
+            -justify center -aspect 400 -border 2 -bg white -relief groove
+    pack $w.m -side top -fill x -padx 2 -pady 2
+    ${NS}::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
+    pack $w.ok -side bottom
+    bind $w <Visibility> "focus $w.ok"
+    bind $w <Key-Escape> "destroy $w"
+    bind $w <Key-Return> "destroy $w"
+    tk::PlaceWindow $w widget .
+}
+
+proc keys {} {
+    global NS
+    set w .keys
+    if {[winfo exists $w]} {
+        raise $w
+        return
+    }
+    if {[tk windowingsystem] eq {aqua}} {
+        set M1T Cmd
+    } else {
+        set M1T Ctrl
+    }
+    ttk_toplevel $w
+    wm title $w [mc "Gitk key bindings"]
+    make_transient $w .
+    message $w.m -text "
+[mc "Gitk key bindings:"]
+
+[mc "<%s-Q>		Quit" $M1T]
+[mc "<%s-W>		Close window" $M1T]
+[mc "<Home>		Move to first commit"]
+[mc "<End>		Move to last commit"]
+[mc "<Up>, p, i	Move up one commit"]
+[mc "<Down>, n, k	Move down one commit"]
+[mc "<Left>, z, j	Go back in history list"]
+[mc "<Right>, x, l	Go forward in history list"]
+[mc "<PageUp>	Move up one page in commit list"]
+[mc "<PageDown>	Move down one page in commit list"]
+[mc "<%s-Home>	Scroll to top of commit list" $M1T]
+[mc "<%s-End>	Scroll to bottom of commit list" $M1T]
+[mc "<%s-Up>	Scroll commit list up one line" $M1T]
+[mc "<%s-Down>	Scroll commit list down one line" $M1T]
+[mc "<%s-PageUp>	Scroll commit list up one page" $M1T]
+[mc "<%s-PageDown>	Scroll commit list down one page" $M1T]
+[mc "<Shift-Up>	Find backwards (upwards, later commits)"]
+[mc "<Shift-Down>	Find forwards (downwards, earlier commits)"]
+[mc "<Delete>, b	Scroll diff view up one page"]
+[mc "<Backspace>	Scroll diff view up one page"]
+[mc "<Space>		Scroll diff view down one page"]
+[mc "u		Scroll diff view up 18 lines"]
+[mc "d		Scroll diff view down 18 lines"]
+[mc "<%s-F>		Find" $M1T]
+[mc "<%s-G>		Move to next find hit" $M1T]
+[mc "<Return>	Move to next find hit"]
+[mc "/		Focus the search box"]
+[mc "?		Move to previous find hit"]
+[mc "f		Scroll diff view to next file"]
+[mc "<%s-S>		Search for next hit in diff view" $M1T]
+[mc "<%s-R>		Search for previous hit in diff view" $M1T]
+[mc "<%s-KP+>	Increase font size" $M1T]
+[mc "<%s-plus>	Increase font size" $M1T]
+[mc "<%s-KP->	Decrease font size" $M1T]
+[mc "<%s-minus>	Decrease font size" $M1T]
+[mc "<F5>		Update"]
+" \
+            -justify left -bg white -border 2 -relief groove
+    pack $w.m -side top -fill both -padx 2 -pady 2
+    ${NS}::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
+    bind $w <Key-Return> [list destroy $w]
+    pack $w.ok -side bottom
+    bind $w <Visibility> "focus $w.ok"
+    bind $w <Key-Escape> "destroy $w"
+    bind $w <Key-Return> "destroy $w"
+}
+
+# Procedures for manipulating the file list window at the
+# bottom right of the overall window.
+
+proc treeview {w l openlevs} {
+    global treecontents treediropen treeheight treeparent treeindex
+
+    set ix 0
+    set treeindex() 0
+    set lev 0
+    set prefix {}
+    set prefixend -1
+    set prefendstack {}
+    set htstack {}
+    set ht 0
+    set treecontents() {}
+    $w conf -state normal
+    foreach f $l {
+        while {[string range $f 0 $prefixend] ne $prefix} {
+            if {$lev <= $openlevs} {
+                $w mark set e:$treeindex($prefix) "end -1c"
+                $w mark gravity e:$treeindex($prefix) left
+            }
+            set treeheight($prefix) $ht
+            incr ht [lindex $htstack end]
+            set htstack [lreplace $htstack end end]
+            set prefixend [lindex $prefendstack end]
+            set prefendstack [lreplace $prefendstack end end]
+            set prefix [string range $prefix 0 $prefixend]
+            incr lev -1
+        }
+        set tail [string range $f [expr {$prefixend+1}] end]
+        while {[set slash [string first "/" $tail]] >= 0} {
+            lappend htstack $ht
+            set ht 0
+            lappend prefendstack $prefixend
+            incr prefixend [expr {$slash + 1}]
+            set d [string range $tail 0 $slash]
+            lappend treecontents($prefix) $d
+            set oldprefix $prefix
+            append prefix $d
+            set treecontents($prefix) {}
+            set treeindex($prefix) [incr ix]
+            set treeparent($prefix) $oldprefix
+            set tail [string range $tail [expr {$slash+1}] end]
+            if {$lev <= $openlevs} {
+                set ht 1
+                set treediropen($prefix) [expr {$lev < $openlevs}]
+                set bm [expr {$lev == $openlevs?
"tri-rt": "tri-dn"}] + $w mark set d:$ix "end -1c" + $w mark gravity d:$ix left + set str "\n" + for {set i 0} {$i < $lev} {incr i} {append str "\t"} + $w insert end $str + $w image create end -align center -image $bm -padx 1 \ + -name a:$ix + $w insert end $d [highlight_tag $prefix] + $w mark set s:$ix "end -1c" + $w mark gravity s:$ix left + } + incr lev + } + if {$tail ne {}} { + if {$lev <= $openlevs} { + incr ht + set str "\n" + for {set i 0} {$i < $lev} {incr i} {append str "\t"} + $w insert end $str + $w insert end $tail [highlight_tag $f] + } + lappend treecontents($prefix) $tail + } + } + while {$htstack ne {}} { + set treeheight($prefix) $ht + incr ht [lindex $htstack end] + set htstack [lreplace $htstack end end] + set prefixend [lindex $prefendstack end] + set prefendstack [lreplace $prefendstack end end] + set prefix [string range $prefix 0 $prefixend] + } + $w conf -state disabled +} + +proc linetoelt {l} { + global treeheight treecontents + + set y 2 + set prefix {} + while {1} { + foreach e $treecontents($prefix) { + if {$y == $l} { + return "$prefix$e" + } + set n 1 + if {[string index $e end] eq "/"} { + set n $treeheight($prefix$e) + if {$y + $n > $l} { + append prefix $e + incr y + break + } + } + incr y $n + } + } +} + +proc highlight_tree {y prefix} { + global treeheight treecontents cflist + + foreach e $treecontents($prefix) { + set path $prefix$e + if {[highlight_tag $path] ne {}} { + $cflist tag add bold $y.0 "$y.0 lineend" + } + incr y + if {[string index $e end] eq "/" && $treeheight($path) > 1} { + set y [highlight_tree $y $path] + } + } + return $y +} + +proc treeclosedir {w dir} { + global treediropen treeheight treeparent treeindex + + set ix $treeindex($dir) + $w conf -state normal + $w delete s:$ix e:$ix + set treediropen($dir) 0 + $w image configure a:$ix -image tri-rt + $w conf -state disabled + set n [expr {1 - $treeheight($dir)}] + while {$dir ne {}} { + incr treeheight($dir) $n + set dir $treeparent($dir) + } +} + +proc treeopendir {w dir} { + global treediropen treeheight treeparent treecontents treeindex + + set ix $treeindex($dir) + $w conf -state normal + $w image configure a:$ix -image tri-dn + $w mark set e:$ix s:$ix + $w mark gravity e:$ix right + set lev 0 + set str "\n" + set n [llength $treecontents($dir)] + for {set x $dir} {$x ne {}} {set x $treeparent($x)} { + incr lev + append str "\t" + incr treeheight($x) $n + } + foreach e $treecontents($dir) { + set de $dir$e + if {[string index $e end] eq "/"} { + set iy $treeindex($de) + $w mark set d:$iy e:$ix + $w mark gravity d:$iy left + $w insert e:$ix $str + set treediropen($de) 0 + $w image create e:$ix -align center -image tri-rt -padx 1 \ + -name a:$iy + $w insert e:$ix $e [highlight_tag $de] + $w mark set s:$iy e:$ix + $w mark gravity s:$iy left + set treeheight($de) 1 + } else { + $w insert e:$ix $str + $w insert e:$ix $e [highlight_tag $de] + } + } + $w mark gravity e:$ix right + $w conf -state disabled + set treediropen($dir) 1 + set top [lindex [split [$w index @0,0] .] 0] + set ht [$w cget -height] + set l [lindex [split [$w index s:$ix] .] 
0] + if {$l < $top} { + $w yview $l.0 + } elseif {$l + $n + 1 > $top + $ht} { + set top [expr {$l + $n + 2 - $ht}] + if {$l < $top} { + set top $l + } + $w yview $top.0 + } +} + +proc treeclick {w x y} { + global treediropen cmitmode ctext cflist cflist_top + + if {$cmitmode ne "tree"} return + if {![info exists cflist_top]} return + set l [lindex [split [$w index "@$x,$y"] "."] 0] + $cflist tag remove highlight $cflist_top.0 "$cflist_top.0 lineend" + $cflist tag add highlight $l.0 "$l.0 lineend" + set cflist_top $l + if {$l == 1} { + $ctext yview 1.0 + return + } + set e [linetoelt $l] + if {[string index $e end] ne "/"} { + showfile $e + } elseif {$treediropen($e)} { + treeclosedir $w $e + } else { + treeopendir $w $e + } +} + +proc setfilelist {id} { + global treefilelist cflist jump_to_here + + treeview $cflist $treefilelist($id) 0 + if {$jump_to_here ne {}} { + set f [lindex $jump_to_here 0] + if {[lsearch -exact $treefilelist($id) $f] >= 0} { + showfile $f + } + } +} + +image create bitmap tri-rt -background black -foreground blue -data { + #define tri-rt_width 13 + #define tri-rt_height 13 + static unsigned char tri-rt_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x30, 0x00, 0x70, 0x00, 0xf0, 0x00, + 0xf0, 0x01, 0xf0, 0x00, 0x70, 0x00, 0x30, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00}; +} -maskdata { + #define tri-rt-mask_width 13 + #define tri-rt-mask_height 13 + static unsigned char tri-rt-mask_bits[] = { + 0x08, 0x00, 0x18, 0x00, 0x38, 0x00, 0x78, 0x00, 0xf8, 0x00, 0xf8, 0x01, + 0xf8, 0x03, 0xf8, 0x01, 0xf8, 0x00, 0x78, 0x00, 0x38, 0x00, 0x18, 0x00, + 0x08, 0x00}; +} +image create bitmap tri-dn -background black -foreground blue -data { + #define tri-dn_width 13 + #define tri-dn_height 13 + static unsigned char tri-dn_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfc, 0x07, 0xf8, 0x03, + 0xf0, 0x01, 0xe0, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00}; +} -maskdata { + #define tri-dn-mask_width 13 + #define tri-dn-mask_height 13 + static unsigned char tri-dn-mask_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x1f, 0xfe, 0x0f, 0xfc, 0x07, + 0xf8, 0x03, 0xf0, 0x01, 0xe0, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00}; +} + +image create bitmap reficon-T -background black -foreground yellow -data { + #define tagicon_width 13 + #define tagicon_height 9 + static unsigned char tagicon_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0xf0, 0x07, 0xf8, 0x07, + 0xfc, 0x07, 0xf8, 0x07, 0xf0, 0x07, 0x00, 0x00, 0x00, 0x00}; +} -maskdata { + #define tagicon-mask_width 13 + #define tagicon-mask_height 9 + static unsigned char tagicon-mask_bits[] = { + 0x00, 0x00, 0xf0, 0x0f, 0xf8, 0x0f, 0xfc, 0x0f, + 0xfe, 0x0f, 0xfc, 0x0f, 0xf8, 0x0f, 0xf0, 0x0f, 0x00, 0x00}; +} +set rectdata { + #define headicon_width 13 + #define headicon_height 9 + static unsigned char headicon_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0xf8, 0x07, 0xf8, 0x07, + 0xf8, 0x07, 0xf8, 0x07, 0xf8, 0x07, 0x00, 0x00, 0x00, 0x00}; +} +set rectmask { + #define headicon-mask_width 13 + #define headicon-mask_height 9 + static unsigned char headicon-mask_bits[] = { + 0x00, 0x00, 0xfc, 0x0f, 0xfc, 0x0f, 0xfc, 0x0f, + 0xfc, 0x0f, 0xfc, 0x0f, 0xfc, 0x0f, 0xfc, 0x0f, 0x00, 0x00}; +} +image create bitmap reficon-H -background black -foreground green \ + -data $rectdata -maskdata $rectmask +image create bitmap reficon-o -background black -foreground "#ddddff" \ + -data $rectdata -maskdata $rectmask + +proc init_flist {first} { + global cflist cflist_top difffilestart + + $cflist conf -state normal + 
$cflist delete 0.0 end + if {$first ne {}} { + $cflist insert end $first + set cflist_top 1 + $cflist tag add highlight 1.0 "1.0 lineend" + } else { + catch {unset cflist_top} + } + $cflist conf -state disabled + set difffilestart {} +} + +proc highlight_tag {f} { + global highlight_paths + + foreach p $highlight_paths { + if {[string match $p $f]} { + return "bold" + } + } + return {} +} + +proc highlight_filelist {} { + global cmitmode cflist + + $cflist conf -state normal + if {$cmitmode ne "tree"} { + set end [lindex [split [$cflist index end] .] 0] + for {set l 2} {$l < $end} {incr l} { + set line [$cflist get $l.0 "$l.0 lineend"] + if {[highlight_tag $line] ne {}} { + $cflist tag add bold $l.0 "$l.0 lineend" + } + } + } else { + highlight_tree 2 {} + } + $cflist conf -state disabled +} + +proc unhighlight_filelist {} { + global cflist + + $cflist conf -state normal + $cflist tag remove bold 1.0 end + $cflist conf -state disabled +} + +proc add_flist {fl} { + global cflist + + $cflist conf -state normal + foreach f $fl { + $cflist insert end "\n" + $cflist insert end $f [highlight_tag $f] + } + $cflist conf -state disabled +} + +proc sel_flist {w x y} { + global ctext difffilestart cflist cflist_top cmitmode + + if {$cmitmode eq "tree"} return + if {![info exists cflist_top]} return + set l [lindex [split [$w index "@$x,$y"] "."] 0] + $cflist tag remove highlight $cflist_top.0 "$cflist_top.0 lineend" + $cflist tag add highlight $l.0 "$l.0 lineend" + set cflist_top $l + if {$l == 1} { + $ctext yview 1.0 + } else { + catch {$ctext yview [lindex $difffilestart [expr {$l - 2}]]} + } +} + +proc pop_flist_menu {w X Y x y} { + global ctext cflist cmitmode flist_menu flist_menu_file + global treediffs diffids + + stopfinding + set l [lindex [split [$w index "@$x,$y"] "."] 0] + if {$l <= 1} return + if {$cmitmode eq "tree"} { + set e [linetoelt $l] + if {[string index $e end] eq "/"} return + } else { + set e [lindex $treediffs($diffids) [expr {$l-2}]] + } + set flist_menu_file $e + set xdiffstate "normal" + if {$cmitmode eq "tree"} { + set xdiffstate "disabled" + } + # Disable "External diff" item in tree mode + $flist_menu entryconf 2 -state $xdiffstate + tk_popup $flist_menu $X $Y +} + +proc find_ctext_fileinfo {line} { + global ctext_file_names ctext_file_lines + + set ok [bsearch $ctext_file_lines $line] + set tline [lindex $ctext_file_lines $ok] + + if {$ok >= [llength $ctext_file_lines] || $line < $tline} { + return {} + } else { + return [list [lindex $ctext_file_names $ok] $tline] + } +} + +proc pop_diff_menu {w X Y x y} { + global ctext diff_menu flist_menu_file + global diff_menu_txtpos diff_menu_line + global diff_menu_filebase + + set diff_menu_txtpos [split [$w index "@$x,$y"] "."] + set diff_menu_line [lindex $diff_menu_txtpos 0] + # don't pop up the menu on hunk-separator or file-separator lines + if {[lsearch -glob [$ctext tag names $diff_menu_line.0] "*sep"] >= 0} { + return + } + stopfinding + set f [find_ctext_fileinfo $diff_menu_line] + if {$f eq {}} return + set flist_menu_file [lindex $f 0] + set diff_menu_filebase [lindex $f 1] + tk_popup $diff_menu $X $Y +} + +proc flist_hl {only} { + global flist_menu_file findstring gdttype + + set x [shellquote $flist_menu_file] + if {$only || $findstring eq {} || $gdttype ne [mc "touching paths:"]} { + set findstring $x + } else { + append findstring " " $x + } + set gdttype [mc "touching paths:"] +} + +proc gitknewtmpdir {} { + global diffnum gitktmpdir gitdir + + if {![info exists gitktmpdir]} { + set gitktmpdir [file join [file 
dirname $gitdir] \ + [format ".gitk-tmp.%s" [pid]]] + if {[catch {file mkdir $gitktmpdir} err]} { + error_popup "[mc "Error creating temporary directory %s:" $gitktmpdir] $err" + unset gitktmpdir + return {} + } + set diffnum 0 + } + incr diffnum + set diffdir [file join $gitktmpdir $diffnum] + if {[catch {file mkdir $diffdir} err]} { + error_popup "[mc "Error creating temporary directory %s:" $diffdir] $err" + return {} + } + return $diffdir +} + +proc save_file_from_commit {filename output what} { + global nullfile + + if {[catch {exec git show $filename -- > $output} err]} { + if {[string match "fatal: bad revision *" $err]} { + return $nullfile + } + error_popup "[mc "Error getting \"%s\" from %s:" $filename $what] $err" + return {} + } + return $output +} + +proc external_diff_get_one_file {diffid filename diffdir} { + global nullid nullid2 nullfile + global gitdir + + if {$diffid == $nullid} { + set difffile [file join [file dirname $gitdir] $filename] + if {[file exists $difffile]} { + return $difffile + } + return $nullfile + } + if {$diffid == $nullid2} { + set difffile [file join $diffdir "\[index\] [file tail $filename]"] + return [save_file_from_commit :$filename $difffile index] + } + set difffile [file join $diffdir "\[$diffid\] [file tail $filename]"] + return [save_file_from_commit $diffid:$filename $difffile \ + "revision $diffid"] +} + +proc external_diff {} { + global nullid nullid2 + global flist_menu_file + global diffids + global extdifftool + + if {[llength $diffids] == 1} { + # no reference commit given + set diffidto [lindex $diffids 0] + if {$diffidto eq $nullid} { + # diffing working copy with index + set diffidfrom $nullid2 + } elseif {$diffidto eq $nullid2} { + # diffing index with HEAD + set diffidfrom "HEAD" + } else { + # use first parent commit + global parentlist selectedline + set diffidfrom [lindex $parentlist $selectedline 0] + } + } else { + set diffidfrom [lindex $diffids 0] + set diffidto [lindex $diffids 1] + } + + # make sure that several diffs wont collide + set diffdir [gitknewtmpdir] + if {$diffdir eq {}} return + + # gather files to diff + set difffromfile [external_diff_get_one_file $diffidfrom $flist_menu_file $diffdir] + set difftofile [external_diff_get_one_file $diffidto $flist_menu_file $diffdir] + + if {$difffromfile ne {} && $difftofile ne {}} { + set cmd [list [shellsplit $extdifftool] $difffromfile $difftofile] + if {[catch {set fl [open |$cmd r]} err]} { + file delete -force $diffdir + error_popup "$extdifftool: [mc "command failed:"] $err" + } else { + fconfigure $fl -blocking 0 + filerun $fl [list delete_at_eof $fl $diffdir] + } + } +} + +proc find_hunk_blamespec {base line} { + global ctext + + # Find and parse the hunk header + set s_lix [$ctext search -backwards -regexp ^@@ "$line.0 lineend" $base.0] + if {$s_lix eq {}} return + + set s_line [$ctext get $s_lix "$s_lix + 1 lines"] + if {![regexp {^@@@*(( -\d+(,\d+)?)+) \+(\d+)(,\d+)? 
@@} $s_line \ + s_line old_specs osz osz1 new_line nsz]} { + return + } + + # base lines for the parents + set base_lines [list $new_line] + foreach old_spec [lrange [split $old_specs " "] 1 end] { + if {![regexp -- {-(\d+)(,\d+)?} $old_spec \ + old_spec old_line osz]} { + return + } + lappend base_lines $old_line + } + + # Now scan the lines to determine offset within the hunk + set max_parent [expr {[llength $base_lines]-2}] + set dline 0 + set s_lno [lindex [split $s_lix "."] 0] + + # Determine if the line is removed + set chunk [$ctext get $line.0 "$line.1 + $max_parent chars"] + if {[string match {[-+ ]*} $chunk]} { + set removed_idx [string first "-" $chunk] + # Choose a parent index + if {$removed_idx >= 0} { + set parent $removed_idx + } else { + set unchanged_idx [string first " " $chunk] + if {$unchanged_idx >= 0} { + set parent $unchanged_idx + } else { + # blame the current commit + set parent -1 + } + } + # then count other lines that belong to it + for {set i $line} {[incr i -1] > $s_lno} {} { + set chunk [$ctext get $i.0 "$i.1 + $max_parent chars"] + # Determine if the line is removed + set removed_idx [string first "-" $chunk] + if {$parent >= 0} { + set code [string index $chunk $parent] + if {$code eq "-" || ($removed_idx < 0 && $code ne "+")} { + incr dline + } + } else { + if {$removed_idx < 0} { + incr dline + } + } + } + incr parent + } else { + set parent 0 + } + + incr dline [lindex $base_lines $parent] + return [list $parent $dline] +} + +proc external_blame_diff {} { + global currentid cmitmode + global diff_menu_txtpos diff_menu_line + global diff_menu_filebase flist_menu_file + + if {$cmitmode eq "tree"} { + set parent_idx 0 + set line [expr {$diff_menu_line - $diff_menu_filebase}] + } else { + set hinfo [find_hunk_blamespec $diff_menu_filebase $diff_menu_line] + if {$hinfo ne {}} { + set parent_idx [lindex $hinfo 0] + set line [lindex $hinfo 1] + } else { + set parent_idx 0 + set line 0 + } + } + + external_blame $parent_idx $line +} + +# Find the SHA1 ID of the blob for file $fname in the index +# at stage 0 or 2 +proc index_sha1 {fname} { + set f [open [list | git ls-files -s $fname] r] + while {[gets $f line] >= 0} { + set info [lindex [split $line "\t"] 0] + set stage [lindex $info 2] + if {$stage eq "0" || $stage eq "2"} { + close $f + return [lindex $info 1] + } + } + close $f + return {} +} + +# Turn an absolute path into one relative to the current directory +proc make_relative {f} { + if {[file pathtype $f] eq "relative"} { + return $f + } + set elts [file split $f] + set here [file split [pwd]] + set ei 0 + set hi 0 + set res {} + foreach d $here { + if {$ei < $hi || $ei >= [llength $elts] || [lindex $elts $ei] ne $d} { + lappend res ".." + } else { + incr ei + } + incr hi + } + set elts [concat $res [lrange $elts $ei end]] + return [eval file join $elts] +} + +proc external_blame {parent_idx {line {}}} { + global flist_menu_file gitdir + global nullid nullid2 + global parentlist selectedline currentid + + if {$parent_idx > 0} { + set base_commit [lindex $parentlist $selectedline [expr {$parent_idx-1}]] + } else { + set base_commit $currentid + } + + if {$base_commit eq {} || $base_commit eq $nullid || $base_commit eq $nullid2} { + error_popup [mc "No such commit"] + return + } + + set cmdline [list git gui blame] + if {$line ne {} && $line > 1} { + lappend cmdline "--line=$line" + } + set f [file join [file dirname $gitdir] $flist_menu_file] + # Unfortunately it seems git gui blame doesn't like + # being given an absolute path... 
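+    # For example (illustrative paths, not from the original comments): with
+    # [pwd] = /work/repo, make_relative below turns /work/repo/dir/file.c
+    # into dir/file.c before it is handed to git gui blame.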
+ set f [make_relative $f] + lappend cmdline $base_commit $f + if {[catch {eval exec $cmdline &} err]} { + error_popup "[mc "git gui blame: command failed:"] $err" + } +} + +proc show_line_source {} { + global cmitmode currentid parents curview blamestuff blameinst + global diff_menu_line diff_menu_filebase flist_menu_file + global nullid nullid2 gitdir + + set from_index {} + if {$cmitmode eq "tree"} { + set id $currentid + set line [expr {$diff_menu_line - $diff_menu_filebase}] + } else { + set h [find_hunk_blamespec $diff_menu_filebase $diff_menu_line] + if {$h eq {}} return + set pi [lindex $h 0] + if {$pi == 0} { + mark_ctext_line $diff_menu_line + return + } + incr pi -1 + if {$currentid eq $nullid} { + if {$pi > 0} { + # must be a merge in progress... + if {[catch { + # get the last line from .git/MERGE_HEAD + set f [open [file join $gitdir MERGE_HEAD] r] + set id [lindex [split [read $f] "\n"] end-1] + close $f + } err]} { + error_popup [mc "Couldn't read merge head: %s" $err] + return + } + } elseif {$parents($curview,$currentid) eq $nullid2} { + # need to do the blame from the index + if {[catch { + set from_index [index_sha1 $flist_menu_file] + } err]} { + error_popup [mc "Error reading index: %s" $err] + return + } + } else { + set id $parents($curview,$currentid) + } + } else { + set id [lindex $parents($curview,$currentid) $pi] + } + set line [lindex $h 1] + } + set blameargs {} + if {$from_index ne {}} { + lappend blameargs | git cat-file blob $from_index + } + lappend blameargs | git blame -p -L$line,+1 + if {$from_index ne {}} { + lappend blameargs --contents - + } else { + lappend blameargs $id + } + lappend blameargs -- [file join [file dirname $gitdir] $flist_menu_file] + if {[catch { + set f [open $blameargs r] + } err]} { + error_popup [mc "Couldn't start git blame: %s" $err] + return + } + nowbusy blaming [mc "Searching"] + fconfigure $f -blocking 0 + set i [reg_instance $f] + set blamestuff($i) {} + set blameinst $i + filerun $f [list read_line_source $f $i] +} + +proc stopblaming {} { + global blameinst + + if {[info exists blameinst]} { + stop_instance $blameinst + unset blameinst + notbusy blaming + } +} + +proc read_line_source {fd inst} { + global blamestuff curview commfd blameinst nullid nullid2 + + while {[gets $fd line] >= 0} { + lappend blamestuff($inst) $line + } + if {![eof $fd]} { + return 1 + } + unset commfd($inst) + unset blameinst + notbusy blaming + fconfigure $fd -blocking 1 + if {[catch {close $fd} err]} { + error_popup [mc "Error running git blame: %s" $err] + return 0 + } + + set fname {} + set line [split [lindex $blamestuff($inst) 0] " "] + set id [lindex $line 0] + set lnum [lindex $line 1] + if {[string length $id] == 40 && [string is xdigit $id] && + [string is digit -strict $lnum]} { + # look for "filename" line + foreach l $blamestuff($inst) { + if {[string match "filename *" $l]} { + set fname [string range $l 9 end] + break + } + } + } + if {$fname ne {}} { + # all looks good, select it + if {$id eq $nullid} { + # blame uses all-zeroes to mean not committed, + # which would mean a change in the index + set id $nullid2 + } + if {[commitinview $id $curview]} { + selectline [rowofcommit $id] 1 [list $fname $lnum] + } else { + error_popup [mc "That line comes from commit %s, \ + which is not in this view" [shortids $id]] + } + } else { + puts "oops couldn't parse git blame output" + } + return 0 +} + +# delete $dir when we see eof on $f (presumably because the child has exited) +proc delete_at_eof {f dir} { + while {[gets $f line] >= 0} {} 
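+    # any remaining output has been drained above; once the child closes the
+    # pipe we clean up the temporary directory that held the diff inputs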
+ if {[eof $f]} { + if {[catch {close $f} err]} { + error_popup "[mc "External diff viewer failed:"] $err" + } + file delete -force $dir + return 0 + } + return 1 +} + +# Functions for adding and removing shell-type quoting + +proc shellquote {str} { + if {![string match "*\['\"\\ \t]*" $str]} { + return $str + } + if {![string match "*\['\"\\]*" $str]} { + return "\"$str\"" + } + if {![string match "*'*" $str]} { + return "'$str'" + } + return "\"[string map {\" \\\" \\ \\\\} $str]\"" +} + +proc shellarglist {l} { + set str {} + foreach a $l { + if {$str ne {}} { + append str " " + } + append str [shellquote $a] + } + return $str +} + +proc shelldequote {str} { + set ret {} + set used -1 + while {1} { + incr used + if {![regexp -start $used -indices "\['\"\\\\ \t]" $str first]} { + append ret [string range $str $used end] + set used [string length $str] + break + } + set first [lindex $first 0] + set ch [string index $str $first] + if {$first > $used} { + append ret [string range $str $used [expr {$first - 1}]] + set used $first + } + if {$ch eq " " || $ch eq "\t"} break + incr used + if {$ch eq "'"} { + set first [string first "'" $str $used] + if {$first < 0} { + error "unmatched single-quote" + } + append ret [string range $str $used [expr {$first - 1}]] + set used $first + continue + } + if {$ch eq "\\"} { + if {$used >= [string length $str]} { + error "trailing backslash" + } + append ret [string index $str $used] + continue + } + # here ch == "\"" + while {1} { + if {![regexp -start $used -indices "\[\"\\\\]" $str first]} { + error "unmatched double-quote" + } + set first [lindex $first 0] + set ch [string index $str $first] + if {$first > $used} { + append ret [string range $str $used [expr {$first - 1}]] + set used $first + } + if {$ch eq "\""} break + incr used + append ret [string index $str $used] + incr used + } + } + return [list $used $ret] +} + +proc shellsplit {str} { + set l {} + while {1} { + set str [string trimleft $str] + if {$str eq {}} break + set dq [shelldequote $str] + set n [lindex $dq 0] + set word [lindex $dq 1] + set str [string range $str $n end] + lappend l $word + } + return $l +} + +# Code to implement multiple views + +proc newview {ishighlight} { + global nextviewnum newviewname newishighlight + global revtreeargs viewargscmd newviewopts curview + + set newishighlight $ishighlight + set top .gitkview + if {[winfo exists $top]} { + raise $top + return + } + decode_view_opts $nextviewnum $revtreeargs + set newviewname($nextviewnum) "[mc "View"] $nextviewnum" + set newviewopts($nextviewnum,perm) 0 + set newviewopts($nextviewnum,cmd) $viewargscmd($curview) + vieweditor $top $nextviewnum [mc "Gitk view definition"] +} + +set known_view_options { + {perm b . {} {mc "Remember this view"}} + {reflabel l + {} {mc "References (space separated list):"}} + {refs t15 .. {} {mc "Branches & tags:"}} + {allrefs b *. "--all" {mc "All refs"}} + {branches b . "--branches" {mc "All (local) branches"}} + {tags b . "--tags" {mc "All tags"}} + {remotes b . "--remotes" {mc "All remote-tracking branches"}} + {commitlbl l + {} {mc "Commit Info (regular expressions):"}} + {author t15 .. "--author=*" {mc "Author:"}} + {committer t15 . "--committer=*" {mc "Committer:"}} + {loginfo t15 .. "--grep=*" {mc "Commit Message:"}} + {allmatch b .. "--all-match" {mc "Matches all Commit Info criteria"}} + {changes_l l + {} {mc "Changes to Files:"}} + {pickaxe_s r0 . {} {mc "Fixed String"}} + {pickaxe_t r1 . "--pickaxe-regex" {mc "Regular Expression"}} + {pickaxe t15 .. 
"-S*" {mc "Search string:"}} + {datelabel l + {} {mc "Commit Dates (\"2 weeks ago\", \"2009-03-17 15:27:38\", \"March 17, 2009 15:27:38\"):"}} + {since t15 .. {"--since=*" "--after=*"} {mc "Since:"}} + {until t15 . {"--until=*" "--before=*"} {mc "Until:"}} + {limit_lbl l + {} {mc "Limit and/or skip a number of revisions (positive integer):"}} + {limit t10 *. "--max-count=*" {mc "Number to show:"}} + {skip t10 . "--skip=*" {mc "Number to skip:"}} + {misc_lbl l + {} {mc "Miscellaneous options:"}} + {dorder b *. {"--date-order" "-d"} {mc "Strictly sort by date"}} + {lright b . "--left-right" {mc "Mark branch sides"}} + {first b . "--first-parent" {mc "Limit to first parent"}} + {smplhst b . "--simplify-by-decoration" {mc "Simple history"}} + {args t50 *. {} {mc "Additional arguments to git log:"}} + {allpaths path + {} {mc "Enter files and directories to include, one per line:"}} + {cmd t50= + {} {mc "Command to generate more commits to include:"}} + } + +# Convert $newviewopts($n, ...) into args for git log. +proc encode_view_opts {n} { + global known_view_options newviewopts + + set rargs [list] + foreach opt $known_view_options { + set patterns [lindex $opt 3] + if {$patterns eq {}} continue + set pattern [lindex $patterns 0] + + if {[lindex $opt 1] eq "b"} { + set val $newviewopts($n,[lindex $opt 0]) + if {$val} { + lappend rargs $pattern + } + } elseif {[regexp {^r(\d+)$} [lindex $opt 1] type value]} { + regexp {^(.*_)} [lindex $opt 0] uselessvar button_id + set val $newviewopts($n,$button_id) + if {$val eq $value} { + lappend rargs $pattern + } + } else { + set val $newviewopts($n,[lindex $opt 0]) + set val [string trim $val] + if {$val ne {}} { + set pfix [string range $pattern 0 end-1] + lappend rargs $pfix$val + } + } + } + set rargs [concat $rargs [shellsplit $newviewopts($n,refs)]] + return [concat $rargs [shellsplit $newviewopts($n,args)]] +} + +# Fill $newviewopts($n, ...) based on args for git log. 
+proc decode_view_opts {n view_args} { + global known_view_options newviewopts + + foreach opt $known_view_options { + set id [lindex $opt 0] + if {[lindex $opt 1] eq "b"} { + # Checkboxes + set val 0 + } elseif {[regexp {^r(\d+)$} [lindex $opt 1]]} { + # Radiobuttons + regexp {^(.*_)} $id uselessvar id + set val 0 + } else { + # Text fields + set val {} + } + set newviewopts($n,$id) $val + } + set oargs [list] + set refargs [list] + foreach arg $view_args { + if {[regexp -- {^-([0-9]+)$} $arg arg cnt] + && ![info exists found(limit)]} { + set newviewopts($n,limit) $cnt + set found(limit) 1 + continue + } + catch { unset val } + foreach opt $known_view_options { + set id [lindex $opt 0] + if {[info exists found($id)]} continue + foreach pattern [lindex $opt 3] { + if {![string match $pattern $arg]} continue + if {[lindex $opt 1] eq "b"} { + # Check buttons + set val 1 + } elseif {[regexp {^r(\d+)$} [lindex $opt 1] match num]} { + # Radio buttons + regexp {^(.*_)} $id uselessvar id + set val $num + } else { + # Text input fields + set size [string length $pattern] + set val [string range $arg [expr {$size-1}] end] + } + set newviewopts($n,$id) $val + set found($id) 1 + break + } + if {[info exists val]} break + } + if {[info exists val]} continue + if {[regexp {^-} $arg]} { + lappend oargs $arg + } else { + lappend refargs $arg + } + } + set newviewopts($n,refs) [shellarglist $refargs] + set newviewopts($n,args) [shellarglist $oargs] +} + +proc edit_or_newview {} { + global curview + + if {$curview > 0} { + editview + } else { + newview 0 + } +} + +proc editview {} { + global curview + global viewname viewperm newviewname newviewopts + global viewargs viewargscmd + + set top .gitkvedit-$curview + if {[winfo exists $top]} { + raise $top + return + } + decode_view_opts $curview $viewargs($curview) + set newviewname($curview) $viewname($curview) + set newviewopts($curview,perm) $viewperm($curview) + set newviewopts($curview,cmd) $viewargscmd($curview) + vieweditor $top $curview "[mc "Gitk: edit view"] $viewname($curview)" +} + +proc vieweditor {top n title} { + global newviewname newviewopts viewfiles bgcolor + global known_view_options NS + + ttk_toplevel $top + wm title $top [concat $title [mc "-- criteria for selecting revisions"]] + make_transient $top . + + # View name + ${NS}::frame $top.nfr + ${NS}::label $top.nl -text [mc "View Name"] + ${NS}::entry $top.name -width 20 -textvariable newviewname($n) + pack $top.nfr -in $top -fill x -pady 5 -padx 3 + pack $top.nl -in $top.nfr -side left -padx {0 5} + pack $top.name -in $top.nfr -side left -padx {0 25} + + # View options + set cframe $top.nfr + set cexpand 0 + set cnt 0 + foreach opt $known_view_options { + set id [lindex $opt 0] + set type [lindex $opt 1] + set flags [lindex $opt 2] + set title [eval [lindex $opt 4]] + set lxpad 0 + + if {$flags eq "+" || $flags eq "*"} { + set cframe $top.fr$cnt + incr cnt + ${NS}::frame $cframe + pack $cframe -in $top -fill x -pady 3 -padx 3 + set cexpand [expr {$flags eq "*"}] + } elseif {$flags eq ".." 
|| $flags eq "*."} {
+            set cframe $top.fr$cnt
+            incr cnt
+            ${NS}::frame $cframe
+            pack $cframe -in $top -fill x -pady 3 -padx [list 15 3]
+            set cexpand [expr {$flags eq "*."}]
+        } else {
+            set lxpad 5
+        }
+
+        if {$type eq "l"} {
+            ${NS}::label $cframe.l_$id -text $title
+            pack $cframe.l_$id -in $cframe -side left -pady [list 3 0] -anchor w
+        } elseif {$type eq "b"} {
+            ${NS}::checkbutton $cframe.c_$id -text $title -variable newviewopts($n,$id)
+            pack $cframe.c_$id -in $cframe -side left \
+                -padx [list $lxpad 0] -expand $cexpand -anchor w
+        } elseif {[regexp {^r(\d+)$} $type type sz]} {
+            regexp {^(.*_)} $id uselessvar button_id
+            ${NS}::radiobutton $cframe.c_$id -text $title -variable newviewopts($n,$button_id) -value $sz
+            pack $cframe.c_$id -in $cframe -side left \
+                -padx [list $lxpad 0] -expand $cexpand -anchor w
+        } elseif {[regexp {^t(\d+)$} $type type sz]} {
+            ${NS}::label $cframe.l_$id -text $title
+            ${NS}::entry $cframe.e_$id -width $sz -background $bgcolor \
+                -textvariable newviewopts($n,$id)
+            pack $cframe.l_$id -in $cframe -side left -padx [list $lxpad 0]
+            pack $cframe.e_$id -in $cframe -side left -expand 1 -fill x
+        } elseif {[regexp {^t(\d+)=$} $type type sz]} {
+            ${NS}::label $cframe.l_$id -text $title
+            ${NS}::entry $cframe.e_$id -width $sz -background $bgcolor \
+                -textvariable newviewopts($n,$id)
+            pack $cframe.l_$id -in $cframe -side top -pady [list 3 0] -anchor w
+            pack $cframe.e_$id -in $cframe -side top -fill x
+        } elseif {$type eq "path"} {
+            ${NS}::label $top.l -text $title
+            pack $top.l -in $top -side top -pady [list 3 0] -anchor w -padx 3
+            text $top.t -width 40 -height 5 -background $bgcolor
+            if {[info exists viewfiles($n)]} {
+                foreach f $viewfiles($n) {
+                    $top.t insert end $f
+                    $top.t insert end "\n"
+                }
+                $top.t delete {end - 1c} end
+                $top.t mark set insert 0.0
+            }
+            pack $top.t -in $top -side top -pady [list 0 5] -fill both -expand 1 -padx 3
+        }
+    }
+
+    ${NS}::frame $top.buts
+    ${NS}::button $top.buts.ok -text [mc "OK"] -command [list newviewok $top $n]
+    ${NS}::button $top.buts.apply -text [mc "Apply (F5)"] -command [list newviewok $top $n 1]
+    ${NS}::button $top.buts.can -text [mc "Cancel"] -command [list destroy $top]
+    bind $top <Control-Return> [list newviewok $top $n]
+    bind $top <F5> [list newviewok $top $n 1]
+    bind $top <Escape> [list destroy $top]
+    grid $top.buts.ok $top.buts.apply $top.buts.can
+    grid columnconfigure $top.buts 0 -weight 1 -uniform a
+    grid columnconfigure $top.buts 1 -weight 1 -uniform a
+    grid columnconfigure $top.buts 2 -weight 1 -uniform a
+    pack $top.buts -in $top -side top -fill x
+    focus $top.t
+}
+
+proc doviewmenu {m first cmd op argv} {
+    set nmenu [$m index end]
+    for {set i $first} {$i <= $nmenu} {incr i} {
+        if {[$m entrycget $i -command] eq $cmd} {
+            eval $m $op $i $argv
+            break
+        }
+    }
+}
+
+proc allviewmenus {n op args} {
+    # global viewhlmenu
+
+    doviewmenu .bar.view 5 [list showview $n] $op $args
+    # doviewmenu $viewhlmenu 1 [list addvhighlight $n] $op $args
+}
+
+proc newviewok {top n {apply 0}} {
+    global nextviewnum newviewperm newviewname newishighlight
+    global viewname viewfiles viewperm selectedview curview
+    global viewargs viewargscmd newviewopts viewhlmenu
+
+    if {[catch {
+        set newargs [encode_view_opts $n]
+    } err]} {
+        error_popup "[mc "Error in commit selection arguments:"] $err" $top
+        return
+    }
+    set files {}
+    foreach f [split [$top.t get 0.0 end] "\n"] {
+        set ft [string trim $f]
+        if {$ft ne {}} {
+            lappend files $ft
+        }
+    }
+    if {![info exists viewfiles($n)]} {
+        # creating a new view
+        incr nextviewnum
+        set
viewname($n) $newviewname($n) + set viewperm($n) $newviewopts($n,perm) + set viewfiles($n) $files + set viewargs($n) $newargs + set viewargscmd($n) $newviewopts($n,cmd) + addviewmenu $n + if {!$newishighlight} { + run showview $n + } else { + run addvhighlight $n + } + } else { + # editing an existing view + set viewperm($n) $newviewopts($n,perm) + if {$newviewname($n) ne $viewname($n)} { + set viewname($n) $newviewname($n) + doviewmenu .bar.view 5 [list showview $n] \ + entryconf [list -label $viewname($n)] + # doviewmenu $viewhlmenu 1 [list addvhighlight $n] \ + # entryconf [list -label $viewname($n) -value $viewname($n)] + } + if {$files ne $viewfiles($n) || $newargs ne $viewargs($n) || \ + $newviewopts($n,cmd) ne $viewargscmd($n)} { + set viewfiles($n) $files + set viewargs($n) $newargs + set viewargscmd($n) $newviewopts($n,cmd) + if {$curview == $n} { + run reloadcommits + } + } + } + if {$apply} return + catch {destroy $top} +} + +proc delview {} { + global curview viewperm hlview selectedhlview + + if {$curview == 0} return + if {[info exists hlview] && $hlview == $curview} { + set selectedhlview [mc "None"] + unset hlview + } + allviewmenus $curview delete + set viewperm($curview) 0 + showview 0 +} + +proc addviewmenu {n} { + global viewname viewhlmenu + + .bar.view add radiobutton -label $viewname($n) \ + -command [list showview $n] -variable selectedview -value $n + #$viewhlmenu add radiobutton -label $viewname($n) \ + # -command [list addvhighlight $n] -variable selectedhlview +} + +proc showview {n} { + global curview cached_commitrow ordertok + global displayorder parentlist rowidlist rowisopt rowfinal + global colormap rowtextx nextcolor canvxmax + global numcommits viewcomplete + global selectedline currentid canv canvy0 + global treediffs + global pending_select mainheadid + global commitidx + global selectedview + global hlview selectedhlview commitinterest + + if {$n == $curview} return + set selid {} + set ymax [lindex [$canv cget -scrollregion] 3] + set span [$canv yview] + set ytop [expr {[lindex $span 0] * $ymax}] + set ybot [expr {[lindex $span 1] * $ymax}] + set yscreen [expr {($ybot - $ytop) / 2}] + if {$selectedline ne {}} { + set selid $currentid + set y [yc $selectedline] + if {$ytop < $y && $y < $ybot} { + set yscreen [expr {$y - $ytop}] + } + } elseif {[info exists pending_select]} { + set selid $pending_select + unset pending_select + } + unselectline + normalline + catch {unset treediffs} + clear_display + if {[info exists hlview] && $hlview == $n} { + unset hlview + set selectedhlview [mc "None"] + } + catch {unset commitinterest} + catch {unset cached_commitrow} + catch {unset ordertok} + + set curview $n + set selectedview $n + .bar.view entryconf [mca "Edit view..."] -state [expr {$n == 0? "disabled": "normal"}] + .bar.view entryconf [mca "Delete view"] -state [expr {$n == 0? 
"disabled": "normal"}] + + run refill_reflist + if {![info exists viewcomplete($n)]} { + getcommits $selid + return + } + + set displayorder {} + set parentlist {} + set rowidlist {} + set rowisopt {} + set rowfinal {} + set numcommits $commitidx($n) + + catch {unset colormap} + catch {unset rowtextx} + set nextcolor 0 + set canvxmax [$canv cget -width] + set curview $n + set row 0 + setcanvscroll + set yf 0 + set row {} + if {$selid ne {} && [commitinview $selid $n]} { + set row [rowofcommit $selid] + # try to get the selected row in the same position on the screen + set ymax [lindex [$canv cget -scrollregion] 3] + set ytop [expr {[yc $row] - $yscreen}] + if {$ytop < 0} { + set ytop 0 + } + set yf [expr {$ytop * 1.0 / $ymax}] + } + allcanvs yview moveto $yf + drawvisible + if {$row ne {}} { + selectline $row 0 + } elseif {!$viewcomplete($n)} { + reset_pending_select $selid + } else { + reset_pending_select {} + + if {[commitinview $pending_select $curview]} { + selectline [rowofcommit $pending_select] 1 + } else { + set row [first_real_row] + if {$row < $numcommits} { + selectline $row 0 + } + } + } + if {!$viewcomplete($n)} { + if {$numcommits == 0} { + show_status [mc "Reading commits..."] + } + } elseif {$numcommits == 0} { + show_status [mc "No commits selected"] + } +} + +# Stuff relating to the highlighting facility + +proc ishighlighted {id} { + global vhighlights fhighlights nhighlights rhighlights + + if {[info exists nhighlights($id)] && $nhighlights($id) > 0} { + return $nhighlights($id) + } + if {[info exists vhighlights($id)] && $vhighlights($id) > 0} { + return $vhighlights($id) + } + if {[info exists fhighlights($id)] && $fhighlights($id) > 0} { + return $fhighlights($id) + } + if {[info exists rhighlights($id)] && $rhighlights($id) > 0} { + return $rhighlights($id) + } + return 0 +} + +proc bolden {id font} { + global canv linehtag currentid boldids need_redisplay markedid + + # need_redisplay = 1 means the display is stale and about to be redrawn + if {$need_redisplay} return + lappend boldids $id + $canv itemconf $linehtag($id) -font $font + if {[info exists currentid] && $id eq $currentid} { + $canv delete secsel + set t [eval $canv create rect [$canv bbox $linehtag($id)] \ + -outline {{}} -tags secsel \ + -fill [$canv cget -selectbackground]] + $canv lower $t + } + if {[info exists markedid] && $id eq $markedid} { + make_idmark $id + } +} + +proc bolden_name {id font} { + global canv2 linentag currentid boldnameids need_redisplay + + if {$need_redisplay} return + lappend boldnameids $id + $canv2 itemconf $linentag($id) -font $font + if {[info exists currentid] && $id eq $currentid} { + $canv2 delete secsel + set t [eval $canv2 create rect [$canv2 bbox $linentag($id)] \ + -outline {{}} -tags secsel \ + -fill [$canv2 cget -selectbackground]] + $canv2 lower $t + } +} + +proc unbolden {} { + global boldids + + set stillbold {} + foreach id $boldids { + if {![ishighlighted $id]} { + bolden $id mainfont + } else { + lappend stillbold $id + } + } + set boldids $stillbold +} + +proc addvhighlight {n} { + global hlview viewcomplete curview vhl_done commitidx + + if {[info exists hlview]} { + delvhighlight + } + set hlview $n + if {$n != $curview && ![info exists viewcomplete($n)]} { + start_rev_list $n + } + set vhl_done $commitidx($hlview) + if {$vhl_done > 0} { + drawvisible + } +} + +proc delvhighlight {} { + global hlview vhighlights + + if {![info exists hlview]} return + unset hlview + catch {unset vhighlights} + unbolden +} + +proc vhighlightmore {} { + global hlview 
vhl_done commitidx vhighlights curview + + set max $commitidx($hlview) + set vr [visiblerows] + set r0 [lindex $vr 0] + set r1 [lindex $vr 1] + for {set i $vhl_done} {$i < $max} {incr i} { + set id [commitonrow $i $hlview] + if {[commitinview $id $curview]} { + set row [rowofcommit $id] + if {$r0 <= $row && $row <= $r1} { + if {![highlighted $row]} { + bolden $id mainfontbold + } + set vhighlights($id) 1 + } + } + } + set vhl_done $max + return 0 +} + +proc askvhighlight {row id} { + global hlview vhighlights iddrawn + + if {[commitinview $id $hlview]} { + if {[info exists iddrawn($id)] && ![ishighlighted $id]} { + bolden $id mainfontbold + } + set vhighlights($id) 1 + } else { + set vhighlights($id) 0 + } +} + +proc hfiles_change {} { + global highlight_files filehighlight fhighlights fh_serial + global highlight_paths + + if {[info exists filehighlight]} { + # delete previous highlights + catch {close $filehighlight} + unset filehighlight + catch {unset fhighlights} + unbolden + unhighlight_filelist + } + set highlight_paths {} + after cancel do_file_hl $fh_serial + incr fh_serial + if {$highlight_files ne {}} { + after 300 do_file_hl $fh_serial + } +} + +proc gdttype_change {name ix op} { + global gdttype highlight_files findstring findpattern + + stopfinding + if {$findstring ne {}} { + if {$gdttype eq [mc "containing:"]} { + if {$highlight_files ne {}} { + set highlight_files {} + hfiles_change + } + findcom_change + } else { + if {$findpattern ne {}} { + set findpattern {} + findcom_change + } + set highlight_files $findstring + hfiles_change + } + drawvisible + } + # enable/disable findtype/findloc menus too +} + +proc find_change {name ix op} { + global gdttype findstring highlight_files + + stopfinding + if {$gdttype eq [mc "containing:"]} { + findcom_change + } else { + if {$highlight_files ne $findstring} { + set highlight_files $findstring + hfiles_change + } + } + drawvisible +} + +proc findcom_change args { + global nhighlights boldnameids + global findpattern findtype findstring gdttype + + stopfinding + # delete previous highlights, if any + foreach id $boldnameids { + bolden_name $id mainfont + } + set boldnameids {} + catch {unset nhighlights} + unbolden + unmarkmatches + if {$gdttype ne [mc "containing:"] || $findstring eq {}} { + set findpattern {} + } elseif {$findtype eq [mc "Regexp"]} { + set findpattern $findstring + } else { + set e [string map {"*" "\\*" "?" "\\?" "\[" "\\\[" "\\" "\\\\"} \ + $findstring] + set findpattern "*$e*" + } +} + +proc makepatterns {l} { + set ret {} + foreach e $l { + set ee [string map {"*" "\\*" "?" "\\?" "\[" "\\\[" "\\" "\\\\"} $e] + if {[string index $ee end] eq "/"} { + lappend ret "$ee*" + } else { + lappend ret $ee + lappend ret "$ee/*" + } + } + return $ret +} + +proc do_file_hl {serial} { + global highlight_files filehighlight highlight_paths gdttype fhl_list + + if {$gdttype eq [mc "touching paths:"]} { + if {[catch {set paths [shellsplit $highlight_files]}]} return + set highlight_paths [makepatterns $paths] + highlight_filelist + set gdtargs [concat -- $paths] + } elseif {$gdttype eq [mc "adding/removing string:"]} { + set gdtargs [list "-S$highlight_files"] + } else { + # must be "containing:", i.e. 
we're searching commit info + return + } + set cmd [concat | git diff-tree -r -s --stdin $gdtargs] + set filehighlight [open $cmd r+] + fconfigure $filehighlight -blocking 0 + filerun $filehighlight readfhighlight + set fhl_list {} + drawvisible + flushhighlights +} + +proc flushhighlights {} { + global filehighlight fhl_list + + if {[info exists filehighlight]} { + lappend fhl_list {} + puts $filehighlight "" + flush $filehighlight + } +} + +proc askfilehighlight {row id} { + global filehighlight fhighlights fhl_list + + lappend fhl_list $id + set fhighlights($id) -1 + puts $filehighlight $id +} + +proc readfhighlight {} { + global filehighlight fhighlights curview iddrawn + global fhl_list find_dirn + + if {![info exists filehighlight]} { + return 0 + } + set nr 0 + while {[incr nr] <= 100 && [gets $filehighlight line] >= 0} { + set line [string trim $line] + set i [lsearch -exact $fhl_list $line] + if {$i < 0} continue + for {set j 0} {$j < $i} {incr j} { + set id [lindex $fhl_list $j] + set fhighlights($id) 0 + } + set fhl_list [lrange $fhl_list [expr {$i+1}] end] + if {$line eq {}} continue + if {![commitinview $line $curview]} continue + if {[info exists iddrawn($line)] && ![ishighlighted $line]} { + bolden $line mainfontbold + } + set fhighlights($line) 1 + } + if {[eof $filehighlight]} { + # strange... + puts "oops, git diff-tree died" + catch {close $filehighlight} + unset filehighlight + return 0 + } + if {[info exists find_dirn]} { + run findmore + } + return 1 +} + +proc doesmatch {f} { + global findtype findpattern + + if {$findtype eq [mc "Regexp"]} { + return [regexp $findpattern $f] + } elseif {$findtype eq [mc "IgnCase"]} { + return [string match -nocase $findpattern $f] + } else { + return [string match $findpattern $f] + } +} + +proc askfindhighlight {row id} { + global nhighlights commitinfo iddrawn + global findloc + global markingmatches + + if {![info exists commitinfo($id)]} { + getcommit $id + } + set info $commitinfo($id) + set isbold 0 + set fldtypes [list [mc Headline] [mc Author] [mc Date] [mc Committer] [mc CDate] [mc Comments]] + foreach f $info ty $fldtypes { + if {($findloc eq [mc "All fields"] || $findloc eq $ty) && + [doesmatch $f]} { + if {$ty eq [mc "Author"]} { + set isbold 2 + break + } + set isbold 1 + } + } + if {$isbold && [info exists iddrawn($id)]} { + if {![ishighlighted $id]} { + bolden $id mainfontbold + if {$isbold > 1} { + bolden_name $id mainfontbold + } + } + if {$markingmatches} { + markrowmatches $row $id + } + } + set nhighlights($id) $isbold +} + +proc markrowmatches {row id} { + global canv canv2 linehtag linentag commitinfo findloc + + set headline [lindex $commitinfo($id) 0] + set author [lindex $commitinfo($id) 1] + $canv delete match$row + $canv2 delete match$row + if {$findloc eq [mc "All fields"] || $findloc eq [mc "Headline"]} { + set m [findmatches $headline] + if {$m ne {}} { + markmatches $canv $row $headline $linehtag($id) $m \ + [$canv itemcget $linehtag($id) -font] $row + } + } + if {$findloc eq [mc "All fields"] || $findloc eq [mc "Author"]} { + set m [findmatches $author] + if {$m ne {}} { + markmatches $canv2 $row $author $linentag($id) $m \ + [$canv2 itemcget $linentag($id) -font] $row + } + } +} + +proc vrel_change {name ix op} { + global highlight_related + + rhighlight_none + if {$highlight_related ne [mc "None"]} { + run drawvisible + } +} + +# prepare for testing whether commits are descendents or ancestors of a +proc rhighlight_sel {a} { + global descendent desc_todo ancestor anc_todo + global highlight_related 
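+    # $a is the commit the user just selected; forget any previous
+    # descendant/ancestor results and seed both work queues with it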
+ + catch {unset descendent} + set desc_todo [list $a] + catch {unset ancestor} + set anc_todo [list $a] + if {$highlight_related ne [mc "None"]} { + rhighlight_none + run drawvisible + } +} + +proc rhighlight_none {} { + global rhighlights + + catch {unset rhighlights} + unbolden +} + +proc is_descendent {a} { + global curview children descendent desc_todo + + set v $curview + set la [rowofcommit $a] + set todo $desc_todo + set leftover {} + set done 0 + for {set i 0} {$i < [llength $todo]} {incr i} { + set do [lindex $todo $i] + if {[rowofcommit $do] < $la} { + lappend leftover $do + continue + } + foreach nk $children($v,$do) { + if {![info exists descendent($nk)]} { + set descendent($nk) 1 + lappend todo $nk + if {$nk eq $a} { + set done 1 + } + } + } + if {$done} { + set desc_todo [concat $leftover [lrange $todo [expr {$i+1}] end]] + return + } + } + set descendent($a) 0 + set desc_todo $leftover +} + +proc is_ancestor {a} { + global curview parents ancestor anc_todo + + set v $curview + set la [rowofcommit $a] + set todo $anc_todo + set leftover {} + set done 0 + for {set i 0} {$i < [llength $todo]} {incr i} { + set do [lindex $todo $i] + if {![commitinview $do $v] || [rowofcommit $do] > $la} { + lappend leftover $do + continue + } + foreach np $parents($v,$do) { + if {![info exists ancestor($np)]} { + set ancestor($np) 1 + lappend todo $np + if {$np eq $a} { + set done 1 + } + } + } + if {$done} { + set anc_todo [concat $leftover [lrange $todo [expr {$i+1}] end]] + return + } + } + set ancestor($a) 0 + set anc_todo $leftover +} + +proc askrelhighlight {row id} { + global descendent highlight_related iddrawn rhighlights + global selectedline ancestor + + if {$selectedline eq {}} return + set isbold 0 + if {$highlight_related eq [mc "Descendant"] || + $highlight_related eq [mc "Not descendant"]} { + if {![info exists descendent($id)]} { + is_descendent $id + } + if {$descendent($id) == ($highlight_related eq [mc "Descendant"])} { + set isbold 1 + } + } elseif {$highlight_related eq [mc "Ancestor"] || + $highlight_related eq [mc "Not ancestor"]} { + if {![info exists ancestor($id)]} { + is_ancestor $id + } + if {$ancestor($id) == ($highlight_related eq [mc "Ancestor"])} { + set isbold 1 + } + } + if {[info exists iddrawn($id)]} { + if {$isbold && ![ishighlighted $id]} { + bolden $id mainfontbold + } + } + set rhighlights($id) $isbold +} + +# Graph layout functions + +proc shortids {ids} { + set res {} + foreach id $ids { + if {[llength $id] > 1} { + lappend res [shortids $id] + } elseif {[regexp {^[0-9a-f]{40}$} $id]} { + lappend res [string range $id 0 7] + } else { + lappend res $id + } + } + return $res +} + +proc ntimes {n o} { + set ret {} + set o [list $o] + for {set mask 1} {$mask <= $n} {incr mask $mask} { + if {($n & $mask) != 0} { + set ret [concat $ret $o] + } + set o [concat $o $o] + } + return $ret +} + +proc ordertoken {id} { + global ordertok curview varcid varcstart varctok curview parents children + global nullid nullid2 + + if {[info exists ordertok($id)]} { + return $ordertok($id) + } + set origid $id + set todo {} + while {1} { + if {[info exists varcid($curview,$id)]} { + set a $varcid($curview,$id) + set p [lindex $varcstart($curview) $a] + } else { + set p [lindex $children($curview,$id) 0] + } + if {[info exists ordertok($p)]} { + set tok $ordertok($p) + break + } + set id [first_real_child $curview,$p] + if {$id eq {}} { + # it's a root + set tok [lindex $varctok($curview) $varcid($curview,$p)] + break + } + if {[llength $parents($curview,$id)] == 1} { + 
lappend todo [list $p {}] + } else { + set j [lsearch -exact $parents($curview,$id) $p] + if {$j < 0} { + puts "oops didn't find [shortids $p] in parents of [shortids $id]" + } + lappend todo [list $p [strrep $j]] + } + } + for {set i [llength $todo]} {[incr i -1] >= 0} {} { + set p [lindex $todo $i 0] + append tok [lindex $todo $i 1] + set ordertok($p) $tok + } + set ordertok($origid) $tok + return $tok +} + +# Work out where id should go in idlist so that order-token +# values increase from left to right +proc idcol {idlist id {i 0}} { + set t [ordertoken $id] + if {$i < 0} { + set i 0 + } + if {$i >= [llength $idlist] || $t < [ordertoken [lindex $idlist $i]]} { + if {$i > [llength $idlist]} { + set i [llength $idlist] + } + while {[incr i -1] >= 0 && $t < [ordertoken [lindex $idlist $i]]} {} + incr i + } else { + if {$t > [ordertoken [lindex $idlist $i]]} { + while {[incr i] < [llength $idlist] && + $t >= [ordertoken [lindex $idlist $i]]} {} + } + } + return $i +} + +proc initlayout {} { + global rowidlist rowisopt rowfinal displayorder parentlist + global numcommits canvxmax canv + global nextcolor + global colormap rowtextx + + set numcommits 0 + set displayorder {} + set parentlist {} + set nextcolor 0 + set rowidlist {} + set rowisopt {} + set rowfinal {} + set canvxmax [$canv cget -width] + catch {unset colormap} + catch {unset rowtextx} + setcanvscroll +} + +proc setcanvscroll {} { + global canv canv2 canv3 numcommits linespc canvxmax canvy0 + global lastscrollset lastscrollrows + + set ymax [expr {$canvy0 + ($numcommits - 0.5) * $linespc + 2}] + $canv conf -scrollregion [list 0 0 $canvxmax $ymax] + $canv2 conf -scrollregion [list 0 0 0 $ymax] + $canv3 conf -scrollregion [list 0 0 0 $ymax] + set lastscrollset [clock clicks -milliseconds] + set lastscrollrows $numcommits +} + +proc visiblerows {} { + global canv numcommits linespc + + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax eq {} || $ymax == 0} return + set f [$canv yview] + set y0 [expr {int([lindex $f 0] * $ymax)}] + set r0 [expr {int(($y0 - 3) / $linespc) - 1}] + if {$r0 < 0} { + set r0 0 + } + set y1 [expr {int([lindex $f 1] * $ymax)}] + set r1 [expr {int(($y1 - 3) / $linespc) + 1}] + if {$r1 >= $numcommits} { + set r1 [expr {$numcommits - 1}] + } + return [list $r0 $r1] +} + +proc layoutmore {} { + global commitidx viewcomplete curview + global numcommits pending_select curview + global lastscrollset lastscrollrows + + if {$lastscrollrows < 100 || $viewcomplete($curview) || + [clock clicks -milliseconds] - $lastscrollset > 500} { + setcanvscroll + } + if {[info exists pending_select] && + [commitinview $pending_select $curview]} { + update + selectline [rowofcommit $pending_select] 1 + } + drawvisible +} + +# With path limiting, we mightn't get the actual HEAD commit, +# so ask git rev-list what is the first ancestor of HEAD that +# touches a file in the path limit. 
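# The query described in the comment above can be reproduced on its own; a
# minimal helper issuing the same command get_viewmainhead uses below (the
# proc name and the idea of passing the path in are illustrative):
proc example_first_touching {path} {
    # newest ancestor of HEAD that modifies something under $path
    set fd [open [concat | git rev-list -1 HEAD -- $path] r]
    set id [gets $fd]
    catch {close $fd}
    return $id
}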
+proc get_viewmainhead {view} { + global viewmainheadid vfilelimit viewinstances mainheadid + + catch { + set rfd [open [concat | git rev-list -1 $mainheadid \ + -- $vfilelimit($view)] r] + set j [reg_instance $rfd] + lappend viewinstances($view) $j + fconfigure $rfd -blocking 0 + filerun $rfd [list getviewhead $rfd $j $view] + set viewmainheadid($curview) {} + } +} + +# git rev-list should give us just 1 line to use as viewmainheadid($view) +proc getviewhead {fd inst view} { + global viewmainheadid commfd curview viewinstances showlocalchanges + + set id {} + if {[gets $fd line] < 0} { + if {![eof $fd]} { + return 1 + } + } elseif {[string length $line] == 40 && [string is xdigit $line]} { + set id $line + } + set viewmainheadid($view) $id + close $fd + unset commfd($inst) + set i [lsearch -exact $viewinstances($view) $inst] + if {$i >= 0} { + set viewinstances($view) [lreplace $viewinstances($view) $i $i] + } + if {$showlocalchanges && $id ne {} && $view == $curview} { + doshowlocalchanges + } + return 0 +} + +proc doshowlocalchanges {} { + global curview viewmainheadid + + if {$viewmainheadid($curview) eq {}} return + if {[commitinview $viewmainheadid($curview) $curview]} { + dodiffindex + } else { + interestedin $viewmainheadid($curview) dodiffindex + } +} + +proc dohidelocalchanges {} { + global nullid nullid2 lserial curview + + if {[commitinview $nullid $curview]} { + removefakerow $nullid + } + if {[commitinview $nullid2 $curview]} { + removefakerow $nullid2 + } + incr lserial +} + +# spawn off a process to do git diff-index --cached HEAD +proc dodiffindex {} { + global lserial showlocalchanges vfilelimit curview + global isworktree + + if {!$showlocalchanges || !$isworktree} return + incr lserial + set cmd "|git diff-index --cached HEAD" + if {$vfilelimit($curview) ne {}} { + set cmd [concat $cmd -- $vfilelimit($curview)] + } + set fd [open $cmd r] + fconfigure $fd -blocking 0 + set i [reg_instance $fd] + filerun $fd [list readdiffindex $fd $lserial $i] +} + +proc readdiffindex {fd serial inst} { + global viewmainheadid nullid nullid2 curview commitinfo commitdata lserial + global vfilelimit + + set isdiff 1 + if {[gets $fd line] < 0} { + if {![eof $fd]} { + return 1 + } + set isdiff 0 + } + # we only need to see one line and we don't really care what it says... 
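# (Equivalent checks from a shell: "git diff-index --cached HEAD" prints one
# line per path whose staged state differs from HEAD and nothing when the
# index is clean, while "git diff-files" does the same for unstaged changes
# in the working tree; readdiffindex and readdifffiles only care whether any
# output appears at all.)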
+ stop_instance $inst + + if {$serial != $lserial} { + return 0 + } + + # now see if there are any local changes not checked in to the index + set cmd "|git diff-files" + if {$vfilelimit($curview) ne {}} { + set cmd [concat $cmd -- $vfilelimit($curview)] + } + set fd [open $cmd r] + fconfigure $fd -blocking 0 + set i [reg_instance $fd] + filerun $fd [list readdifffiles $fd $serial $i] + + if {$isdiff && ![commitinview $nullid2 $curview]} { + # add the line for the changes in the index to the graph + set hl [mc "Local changes checked in to index but not committed"] + set commitinfo($nullid2) [list $hl {} {} {} {} " $hl\n"] + set commitdata($nullid2) "\n $hl\n" + if {[commitinview $nullid $curview]} { + removefakerow $nullid + } + insertfakerow $nullid2 $viewmainheadid($curview) + } elseif {!$isdiff && [commitinview $nullid2 $curview]} { + if {[commitinview $nullid $curview]} { + removefakerow $nullid + } + removefakerow $nullid2 + } + return 0 +} + +proc readdifffiles {fd serial inst} { + global viewmainheadid nullid nullid2 curview + global commitinfo commitdata lserial + + set isdiff 1 + if {[gets $fd line] < 0} { + if {![eof $fd]} { + return 1 + } + set isdiff 0 + } + # we only need to see one line and we don't really care what it says... + stop_instance $inst + + if {$serial != $lserial} { + return 0 + } + + if {$isdiff && ![commitinview $nullid $curview]} { + # add the line for the local diff to the graph + set hl [mc "Local uncommitted changes, not checked in to index"] + set commitinfo($nullid) [list $hl {} {} {} {} " $hl\n"] + set commitdata($nullid) "\n $hl\n" + if {[commitinview $nullid2 $curview]} { + set p $nullid2 + } else { + set p $viewmainheadid($curview) + } + insertfakerow $nullid $p + } elseif {!$isdiff && [commitinview $nullid $curview]} { + removefakerow $nullid + } + return 0 +} + +proc nextuse {id row} { + global curview children + + if {[info exists children($curview,$id)]} { + foreach kid $children($curview,$id) { + if {![commitinview $kid $curview]} { + return -1 + } + if {[rowofcommit $kid] > $row} { + return [rowofcommit $kid] + } + } + } + if {[commitinview $id $curview]} { + return [rowofcommit $id] + } + return -1 +} + +proc prevuse {id row} { + global curview children + + set ret -1 + if {[info exists children($curview,$id)]} { + foreach kid $children($curview,$id) { + if {![commitinview $kid $curview]} break + if {[rowofcommit $kid] < $row} { + set ret [rowofcommit $kid] + } + } + } + return $ret +} + +proc make_idlist {row} { + global displayorder parentlist uparrowlen downarrowlen mingaplen + global commitidx curview children + + set r [expr {$row - $mingaplen - $downarrowlen - 1}] + if {$r < 0} { + set r 0 + } + set ra [expr {$row - $downarrowlen}] + if {$ra < 0} { + set ra 0 + } + set rb [expr {$row + $uparrowlen}] + if {$rb > $commitidx($curview)} { + set rb $commitidx($curview) + } + make_disporder $r [expr {$rb + 1}] + set ids {} + for {} {$r < $ra} {incr r} { + set nextid [lindex $displayorder [expr {$r + 1}]] + foreach p [lindex $parentlist $r] { + if {$p eq $nextid} continue + set rn [nextuse $p $r] + if {$rn >= $row && + $rn <= $r + $downarrowlen + $mingaplen + $uparrowlen} { + lappend ids [list [ordertoken $p] $p] + } + } + } + for {} {$r < $row} {incr r} { + set nextid [lindex $displayorder [expr {$r + 1}]] + foreach p [lindex $parentlist $r] { + if {$p eq $nextid} continue + set rn [nextuse $p $r] + if {$rn < 0 || $rn >= $row} { + lappend ids [list [ordertoken $p] $p] + } + } + } + set id [lindex $displayorder $row] + lappend ids [list 
[ordertoken $id] $id] + while {$r < $rb} { + foreach p [lindex $parentlist $r] { + set firstkid [lindex $children($curview,$p) 0] + if {[rowofcommit $firstkid] < $row} { + lappend ids [list [ordertoken $p] $p] + } + } + incr r + set id [lindex $displayorder $r] + if {$id ne {}} { + set firstkid [lindex $children($curview,$id) 0] + if {$firstkid ne {} && [rowofcommit $firstkid] < $row} { + lappend ids [list [ordertoken $id] $id] + } + } + } + set idlist {} + foreach idx [lsort -unique $ids] { + lappend idlist [lindex $idx 1] + } + return $idlist +} + +proc rowsequal {a b} { + while {[set i [lsearch -exact $a {}]] >= 0} { + set a [lreplace $a $i $i] + } + while {[set i [lsearch -exact $b {}]] >= 0} { + set b [lreplace $b $i $i] + } + return [expr {$a eq $b}] +} + +proc makeupline {id row rend col} { + global rowidlist uparrowlen downarrowlen mingaplen + + for {set r $rend} {1} {set r $rstart} { + set rstart [prevuse $id $r] + if {$rstart < 0} return + if {$rstart < $row} break + } + if {$rstart + $uparrowlen + $mingaplen + $downarrowlen < $rend} { + set rstart [expr {$rend - $uparrowlen - 1}] + } + for {set r $rstart} {[incr r] <= $row} {} { + set idlist [lindex $rowidlist $r] + if {$idlist ne {} && [lsearch -exact $idlist $id] < 0} { + set col [idcol $idlist $id $col] + lset rowidlist $r [linsert $idlist $col $id] + changedrow $r + } + } +} + +proc layoutrows {row endrow} { + global rowidlist rowisopt rowfinal displayorder + global uparrowlen downarrowlen maxwidth mingaplen + global children parentlist + global commitidx viewcomplete curview + + make_disporder [expr {$row - 1}] [expr {$endrow + $uparrowlen}] + set idlist {} + if {$row > 0} { + set rm1 [expr {$row - 1}] + foreach id [lindex $rowidlist $rm1] { + if {$id ne {}} { + lappend idlist $id + } + } + set final [lindex $rowfinal $rm1] + } + for {} {$row < $endrow} {incr row} { + set rm1 [expr {$row - 1}] + if {$rm1 < 0 || $idlist eq {}} { + set idlist [make_idlist $row] + set final 1 + } else { + set id [lindex $displayorder $rm1] + set col [lsearch -exact $idlist $id] + set idlist [lreplace $idlist $col $col] + foreach p [lindex $parentlist $rm1] { + if {[lsearch -exact $idlist $p] < 0} { + set col [idcol $idlist $p $col] + set idlist [linsert $idlist $col $p] + # if not the first child, we have to insert a line going up + if {$id ne [lindex $children($curview,$p) 0]} { + makeupline $p $rm1 $row $col + } + } + } + set id [lindex $displayorder $row] + if {$row > $downarrowlen} { + set termrow [expr {$row - $downarrowlen - 1}] + foreach p [lindex $parentlist $termrow] { + set i [lsearch -exact $idlist $p] + if {$i < 0} continue + set nr [nextuse $p $termrow] + if {$nr < 0 || $nr >= $row + $mingaplen + $uparrowlen} { + set idlist [lreplace $idlist $i $i] + } + } + } + set col [lsearch -exact $idlist $id] + if {$col < 0} { + set col [idcol $idlist $id] + set idlist [linsert $idlist $col $id] + if {$children($curview,$id) ne {}} { + makeupline $id $rm1 $row $col + } + } + set r [expr {$row + $uparrowlen - 1}] + if {$r < $commitidx($curview)} { + set x $col + foreach p [lindex $parentlist $r] { + if {[lsearch -exact $idlist $p] >= 0} continue + set fk [lindex $children($curview,$p) 0] + if {[rowofcommit $fk] < $row} { + set x [idcol $idlist $p $x] + set idlist [linsert $idlist $x $p] + } + } + if {[incr r] < $commitidx($curview)} { + set p [lindex $displayorder $r] + if {[lsearch -exact $idlist $p] < 0} { + set fk [lindex $children($curview,$p) 0] + if {$fk ne {} && [rowofcommit $fk] < $row} { + set x [idcol $idlist $p $x] + set idlist 
[linsert $idlist $x $p] + } + } + } + } + } + if {$final && !$viewcomplete($curview) && + $row + $uparrowlen + $mingaplen + $downarrowlen + >= $commitidx($curview)} { + set final 0 + } + set l [llength $rowidlist] + if {$row == $l} { + lappend rowidlist $idlist + lappend rowisopt 0 + lappend rowfinal $final + } elseif {$row < $l} { + if {![rowsequal $idlist [lindex $rowidlist $row]]} { + lset rowidlist $row $idlist + changedrow $row + } + lset rowfinal $row $final + } else { + set pad [ntimes [expr {$row - $l}] {}] + set rowidlist [concat $rowidlist $pad] + lappend rowidlist $idlist + set rowfinal [concat $rowfinal $pad] + lappend rowfinal $final + set rowisopt [concat $rowisopt [ntimes [expr {$row - $l + 1}] 0]] + } + } + return $row +} + +proc changedrow {row} { + global displayorder iddrawn rowisopt need_redisplay + + set l [llength $rowisopt] + if {$row < $l} { + lset rowisopt $row 0 + if {$row + 1 < $l} { + lset rowisopt [expr {$row + 1}] 0 + if {$row + 2 < $l} { + lset rowisopt [expr {$row + 2}] 0 + } + } + } + set id [lindex $displayorder $row] + if {[info exists iddrawn($id)]} { + set need_redisplay 1 + } +} + +proc insert_pad {row col npad} { + global rowidlist + + set pad [ntimes $npad {}] + set idlist [lindex $rowidlist $row] + set bef [lrange $idlist 0 [expr {$col - 1}]] + set aft [lrange $idlist $col end] + set i [lsearch -exact $aft {}] + if {$i > 0} { + set aft [lreplace $aft $i $i] + } + lset rowidlist $row [concat $bef $pad $aft] + changedrow $row +} + +proc optimize_rows {row col endrow} { + global rowidlist rowisopt displayorder curview children + + if {$row < 1} { + set row 1 + } + for {} {$row < $endrow} {incr row; set col 0} { + if {[lindex $rowisopt $row]} continue + set haspad 0 + set y0 [expr {$row - 1}] + set ym [expr {$row - 2}] + set idlist [lindex $rowidlist $row] + set previdlist [lindex $rowidlist $y0] + if {$idlist eq {} || $previdlist eq {}} continue + if {$ym >= 0} { + set pprevidlist [lindex $rowidlist $ym] + if {$pprevidlist eq {}} continue + } else { + set pprevidlist {} + } + set x0 -1 + set xm -1 + for {} {$col < [llength $idlist]} {incr col} { + set id [lindex $idlist $col] + if {[lindex $previdlist $col] eq $id} continue + if {$id eq {}} { + set haspad 1 + continue + } + set x0 [lsearch -exact $previdlist $id] + if {$x0 < 0} continue + set z [expr {$x0 - $col}] + set isarrow 0 + set z0 {} + if {$ym >= 0} { + set xm [lsearch -exact $pprevidlist $id] + if {$xm >= 0} { + set z0 [expr {$xm - $x0}] + } + } + if {$z0 eq {}} { + # if row y0 is the first child of $id then it's not an arrow + if {[lindex $children($curview,$id) 0] ne + [lindex $displayorder $y0]} { + set isarrow 1 + } + } + if {!$isarrow && $id ne [lindex $displayorder $row] && + [lsearch -exact [lindex $rowidlist [expr {$row+1}]] $id] < 0} { + set isarrow 1 + } + # Looking at lines from this row to the previous row, + # make them go straight up if they end in an arrow on + # the previous row; otherwise make them go straight up + # or at 45 degrees. 
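# (In what follows, z is how far $id shifts horizontally between the previous
# row and this one, z0 the corresponding shift one row further up, and the
# inserted "pads" are empty columns that keep a line from moving more than
# one column per row, or from moving at all on rows where it is drawn as an
# arrow.)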
+ if {$z < -1 || ($z < 0 && $isarrow)} { + # Line currently goes left too much; + # insert pads in the previous row, then optimize it + set npad [expr {-1 - $z + $isarrow}] + insert_pad $y0 $x0 $npad + if {$y0 > 0} { + optimize_rows $y0 $x0 $row + } + set previdlist [lindex $rowidlist $y0] + set x0 [lsearch -exact $previdlist $id] + set z [expr {$x0 - $col}] + if {$z0 ne {}} { + set pprevidlist [lindex $rowidlist $ym] + set xm [lsearch -exact $pprevidlist $id] + set z0 [expr {$xm - $x0}] + } + } elseif {$z > 1 || ($z > 0 && $isarrow)} { + # Line currently goes right too much; + # insert pads in this line + set npad [expr {$z - 1 + $isarrow}] + insert_pad $row $col $npad + set idlist [lindex $rowidlist $row] + incr col $npad + set z [expr {$x0 - $col}] + set haspad 1 + } + if {$z0 eq {} && !$isarrow && $ym >= 0} { + # this line links to its first child on row $row-2 + set id [lindex $displayorder $ym] + set xc [lsearch -exact $pprevidlist $id] + if {$xc >= 0} { + set z0 [expr {$xc - $x0}] + } + } + # avoid lines jigging left then immediately right + if {$z0 ne {} && $z < 0 && $z0 > 0} { + insert_pad $y0 $x0 1 + incr x0 + optimize_rows $y0 $x0 $row + set previdlist [lindex $rowidlist $y0] + } + } + if {!$haspad} { + # Find the first column that doesn't have a line going right + for {set col [llength $idlist]} {[incr col -1] >= 0} {} { + set id [lindex $idlist $col] + if {$id eq {}} break + set x0 [lsearch -exact $previdlist $id] + if {$x0 < 0} { + # check if this is the link to the first child + set kid [lindex $displayorder $y0] + if {[lindex $children($curview,$id) 0] eq $kid} { + # it is, work out offset to child + set x0 [lsearch -exact $previdlist $kid] + } + } + if {$x0 <= $col} break + } + # Insert a pad at that column as long as it has a line and + # isn't the last column + if {$x0 >= 0 && [incr col] < [llength $idlist]} { + set idlist [linsert $idlist $col {}] + lset rowidlist $row $idlist + changedrow $row + } + } + } +} + +proc xc {row col} { + global canvx0 linespc + return [expr {$canvx0 + $col * $linespc}] +} + +proc yc {row} { + global canvy0 linespc + return [expr {$canvy0 + $row * $linespc}] +} + +proc linewidth {id} { + global thickerline lthickness + + set wid $lthickness + if {[info exists thickerline] && $id eq $thickerline} { + set wid [expr {2 * $lthickness}] + } + return $wid +} + +proc rowranges {id} { + global curview children uparrowlen downarrowlen + global rowidlist + + set kids $children($curview,$id) + if {$kids eq {}} { + return {} + } + set ret {} + lappend kids $id + foreach child $kids { + if {![commitinview $child $curview]} break + set row [rowofcommit $child] + if {![info exists prev]} { + lappend ret [expr {$row + 1}] + } else { + if {$row <= $prevrow} { + puts "oops children of [shortids $id] out of order [shortids $child] $row <= [shortids $prev] $prevrow" + } + # see if the line extends the whole way from prevrow to row + if {$row > $prevrow + $uparrowlen + $downarrowlen && + [lsearch -exact [lindex $rowidlist \ + [expr {int(($row + $prevrow) / 2)}]] $id] < 0} { + # it doesn't, see where it ends + set r [expr {$prevrow + $downarrowlen}] + if {[lsearch -exact [lindex $rowidlist $r] $id] < 0} { + while {[incr r -1] > $prevrow && + [lsearch -exact [lindex $rowidlist $r] $id] < 0} {} + } else { + while {[incr r] <= $row && + [lsearch -exact [lindex $rowidlist $r] $id] >= 0} {} + incr r -1 + } + lappend ret $r + # see where it starts up again + set r [expr {$row - $uparrowlen}] + if {[lsearch -exact [lindex $rowidlist $r] $id] < 0} { + while {[incr r] < 
$row && + [lsearch -exact [lindex $rowidlist $r] $id] < 0} {} + } else { + while {[incr r -1] >= $prevrow && + [lsearch -exact [lindex $rowidlist $r] $id] >= 0} {} + incr r + } + lappend ret $r + } + } + if {$child eq $id} { + lappend ret $row + } + set prev $child + set prevrow $row + } + return $ret +} + +proc drawlineseg {id row endrow arrowlow} { + global rowidlist displayorder iddrawn linesegs + global canv colormap linespc curview maxlinelen parentlist + + set cols [list [lsearch -exact [lindex $rowidlist $row] $id]] + set le [expr {$row + 1}] + set arrowhigh 1 + while {1} { + set c [lsearch -exact [lindex $rowidlist $le] $id] + if {$c < 0} { + incr le -1 + break + } + lappend cols $c + set x [lindex $displayorder $le] + if {$x eq $id} { + set arrowhigh 0 + break + } + if {[info exists iddrawn($x)] || $le == $endrow} { + set c [lsearch -exact [lindex $rowidlist [expr {$le+1}]] $id] + if {$c >= 0} { + lappend cols $c + set arrowhigh 0 + } + break + } + incr le + } + if {$le <= $row} { + return $row + } + + set lines {} + set i 0 + set joinhigh 0 + if {[info exists linesegs($id)]} { + set lines $linesegs($id) + foreach li $lines { + set r0 [lindex $li 0] + if {$r0 > $row} { + if {$r0 == $le && [lindex $li 1] - $row <= $maxlinelen} { + set joinhigh 1 + } + break + } + incr i + } + } + set joinlow 0 + if {$i > 0} { + set li [lindex $lines [expr {$i-1}]] + set r1 [lindex $li 1] + if {$r1 == $row && $le - [lindex $li 0] <= $maxlinelen} { + set joinlow 1 + } + } + + set x [lindex $cols [expr {$le - $row}]] + set xp [lindex $cols [expr {$le - 1 - $row}]] + set dir [expr {$xp - $x}] + if {$joinhigh} { + set ith [lindex $lines $i 2] + set coords [$canv coords $ith] + set ah [$canv itemcget $ith -arrow] + set arrowhigh [expr {$ah eq "first" || $ah eq "both"}] + set x2 [lindex $cols [expr {$le + 1 - $row}]] + if {$x2 ne {} && $x - $x2 == $dir} { + set coords [lrange $coords 0 end-2] + } + } else { + set coords [list [xc $le $x] [yc $le]] + } + if {$joinlow} { + set itl [lindex $lines [expr {$i-1}] 2] + set al [$canv itemcget $itl -arrow] + set arrowlow [expr {$al eq "last" || $al eq "both"}] + } elseif {$arrowlow} { + if {[lsearch -exact [lindex $rowidlist [expr {$row-1}]] $id] >= 0 || + [lsearch -exact [lindex $parentlist [expr {$row-1}]] $id] >= 0} { + set arrowlow 0 + } + } + set arrow [lindex {none first last both} [expr {$arrowhigh + 2*$arrowlow}]] + for {set y $le} {[incr y -1] > $row} {} { + set x $xp + set xp [lindex $cols [expr {$y - 1 - $row}]] + set ndir [expr {$xp - $x}] + if {$dir != $ndir || $xp < 0} { + lappend coords [xc $y $x] [yc $y] + } + set dir $ndir + } + if {!$joinlow} { + if {$xp < 0} { + # join parent line to first child + set ch [lindex $displayorder $row] + set xc [lsearch -exact [lindex $rowidlist $row] $ch] + if {$xc < 0} { + puts "oops: drawlineseg: child $ch not on row $row" + } elseif {$xc != $x} { + if {($arrowhigh && $le == $row + 1) || $dir == 0} { + set d [expr {int(0.5 * $linespc)}] + set x1 [xc $row $x] + if {$xc < $x} { + set x2 [expr {$x1 - $d}] + } else { + set x2 [expr {$x1 + $d}] + } + set y2 [yc $row] + set y1 [expr {$y2 + $d}] + lappend coords $x1 $y1 $x2 $y2 + } elseif {$xc < $x - 1} { + lappend coords [xc $row [expr {$x-1}]] [yc $row] + } elseif {$xc > $x + 1} { + lappend coords [xc $row [expr {$x+1}]] [yc $row] + } + set x $xc + } + lappend coords [xc $row $x] [yc $row] + } else { + set xn [xc $row $xp] + set yn [yc $row] + lappend coords $xn $yn + } + if {!$joinhigh} { + assigncolor $id + set t [$canv create line $coords -width [linewidth $id] \ + 
-fill $colormap($id) -tags lines.$id -arrow $arrow] + $canv lower $t + bindline $t $id + set lines [linsert $lines $i [list $row $le $t]] + } else { + $canv coords $ith $coords + if {$arrow ne $ah} { + $canv itemconf $ith -arrow $arrow + } + lset lines $i 0 $row + } + } else { + set xo [lsearch -exact [lindex $rowidlist [expr {$row - 1}]] $id] + set ndir [expr {$xo - $xp}] + set clow [$canv coords $itl] + if {$dir == $ndir} { + set clow [lrange $clow 2 end] + } + set coords [concat $coords $clow] + if {!$joinhigh} { + lset lines [expr {$i-1}] 1 $le + } else { + # coalesce two pieces + $canv delete $ith + set b [lindex $lines [expr {$i-1}] 0] + set e [lindex $lines $i 1] + set lines [lreplace $lines [expr {$i-1}] $i [list $b $e $itl]] + } + $canv coords $itl $coords + if {$arrow ne $al} { + $canv itemconf $itl -arrow $arrow + } + } + + set linesegs($id) $lines + return $le +} + +proc drawparentlinks {id row} { + global rowidlist canv colormap curview parentlist + global idpos linespc + + set rowids [lindex $rowidlist $row] + set col [lsearch -exact $rowids $id] + if {$col < 0} return + set olds [lindex $parentlist $row] + set row2 [expr {$row + 1}] + set x [xc $row $col] + set y [yc $row] + set y2 [yc $row2] + set d [expr {int(0.5 * $linespc)}] + set ymid [expr {$y + $d}] + set ids [lindex $rowidlist $row2] + # rmx = right-most X coord used + set rmx 0 + foreach p $olds { + set i [lsearch -exact $ids $p] + if {$i < 0} { + puts "oops, parent $p of $id not in list" + continue + } + set x2 [xc $row2 $i] + if {$x2 > $rmx} { + set rmx $x2 + } + set j [lsearch -exact $rowids $p] + if {$j < 0} { + # drawlineseg will do this one for us + continue + } + assigncolor $p + # should handle duplicated parents here... + set coords [list $x $y] + if {$i != $col} { + # if attaching to a vertical segment, draw a smaller + # slant for visual distinctness + if {$i == $j} { + if {$i < $col} { + lappend coords [expr {$x2 + $d}] $y $x2 $ymid + } else { + lappend coords [expr {$x2 - $d}] $y $x2 $ymid + } + } elseif {$i < $col && $i < $j} { + # segment slants towards us already + lappend coords [xc $row $j] $y + } else { + if {$i < $col - 1} { + lappend coords [expr {$x2 + $linespc}] $y + } elseif {$i > $col + 1} { + lappend coords [expr {$x2 - $linespc}] $y + } + lappend coords $x2 $y2 + } + } else { + lappend coords $x2 $y2 + } + set t [$canv create line $coords -width [linewidth $p] \ + -fill $colormap($p) -tags lines.$p] + $canv lower $t + bindline $t $p + } + if {$rmx > [lindex $idpos($id) 1]} { + lset idpos($id) 1 $rmx + redrawtags $id + } +} + +proc drawlines {id} { + global canv + + $canv itemconf lines.$id -width [linewidth $id] +} + +proc drawcmittext {id row col} { + global linespc canv canv2 canv3 fgcolor curview + global cmitlisted commitinfo rowidlist parentlist + global rowtextx idpos idtags idheads idotherrefs + global linehtag linentag linedtag selectedline + global canvxmax boldids boldnameids fgcolor markedid + global mainheadid nullid nullid2 circleitem circlecolors ctxbut + + # listed is 0 for boundary, 1 for normal, 2 for negative, 3 for left, 4 for right + set listed $cmitlisted($curview,$id) + if {$id eq $nullid} { + set ofill red + } elseif {$id eq $nullid2} { + set ofill green + } elseif {$id eq $mainheadid} { + set ofill yellow + } else { + set ofill [lindex $circlecolors $listed] + } + set x [xc $row $col] + set y [yc $row] + set orad [expr {$linespc / 3}] + if {$listed <= 2} { + set t [$canv create oval [expr {$x - $orad}] [expr {$y - $orad}] \ + [expr {$x + $orad - 1}] [expr {$y + 
$orad - 1}] \ + -fill $ofill -outline $fgcolor -width 1 -tags circle] + } elseif {$listed == 3} { + # triangle pointing left for left-side commits + set t [$canv create polygon \ + [expr {$x - $orad}] $y \ + [expr {$x + $orad - 1}] [expr {$y - $orad}] \ + [expr {$x + $orad - 1}] [expr {$y + $orad - 1}] \ + -fill $ofill -outline $fgcolor -width 1 -tags circle] + } else { + # triangle pointing right for right-side commits + set t [$canv create polygon \ + [expr {$x + $orad - 1}] $y \ + [expr {$x - $orad}] [expr {$y - $orad}] \ + [expr {$x - $orad}] [expr {$y + $orad - 1}] \ + -fill $ofill -outline $fgcolor -width 1 -tags circle] + } + set circleitem($row) $t + $canv raise $t + $canv bind $t <1> {selcanvline {} %x %y} + set rmx [llength [lindex $rowidlist $row]] + set olds [lindex $parentlist $row] + if {$olds ne {}} { + set nextids [lindex $rowidlist [expr {$row + 1}]] + foreach p $olds { + set i [lsearch -exact $nextids $p] + if {$i > $rmx} { + set rmx $i + } + } + } + set xt [xc $row $rmx] + set rowtextx($row) $xt + set idpos($id) [list $x $xt $y] + if {[info exists idtags($id)] || [info exists idheads($id)] + || [info exists idotherrefs($id)]} { + set xt [drawtags $id $x $xt $y] + } + set headline [lindex $commitinfo($id) 0] + set name [lindex $commitinfo($id) 1] + set date [lindex $commitinfo($id) 2] + set date [formatdate $date] + set font mainfont + set nfont mainfont + set isbold [ishighlighted $id] + if {$isbold > 0} { + lappend boldids $id + set font mainfontbold + if {$isbold > 1} { + lappend boldnameids $id + set nfont mainfontbold + } + } + set linehtag($id) [$canv create text $xt $y -anchor w -fill $fgcolor \ + -text $headline -font $font -tags text] + $canv bind $linehtag($id) $ctxbut "rowmenu %X %Y $id" + set linentag($id) [$canv2 create text 3 $y -anchor w -fill $fgcolor \ + -text $name -font $nfont -tags text] + set linedtag($id) [$canv3 create text 3 $y -anchor w -fill $fgcolor \ + -text $date -font mainfont -tags text] + if {$selectedline == $row} { + make_secsel $id + } + if {[info exists markedid] && $markedid eq $id} { + make_idmark $id + } + set xr [expr {$xt + [font measure $font $headline]}] + if {$xr > $canvxmax} { + set canvxmax $xr + setcanvscroll + } +} + +proc drawcmitrow {row} { + global displayorder rowidlist nrows_drawn + global iddrawn markingmatches + global commitinfo numcommits + global filehighlight fhighlights findpattern nhighlights + global hlview vhighlights + global highlight_related rhighlights + + if {$row >= $numcommits} return + + set id [lindex $displayorder $row] + if {[info exists hlview] && ![info exists vhighlights($id)]} { + askvhighlight $row $id + } + if {[info exists filehighlight] && ![info exists fhighlights($id)]} { + askfilehighlight $row $id + } + if {$findpattern ne {} && ![info exists nhighlights($id)]} { + askfindhighlight $row $id + } + if {$highlight_related ne [mc "None"] && ![info exists rhighlights($id)]} { + askrelhighlight $row $id + } + if {![info exists iddrawn($id)]} { + set col [lsearch -exact [lindex $rowidlist $row] $id] + if {$col < 0} { + puts "oops, row $row id $id not in list" + return + } + if {![info exists commitinfo($id)]} { + getcommit $id + } + assigncolor $id + drawcmittext $id $row $col + set iddrawn($id) 1 + incr nrows_drawn + } + if {$markingmatches} { + markrowmatches $row $id + } +} + +proc drawcommits {row {endrow {}}} { + global numcommits iddrawn displayorder curview need_redisplay + global parentlist rowidlist rowfinal uparrowlen downarrowlen nrows_drawn + + if {$row < 0} { + set row 0 + } + if 
{$endrow eq {}} { + set endrow $row + } + if {$endrow >= $numcommits} { + set endrow [expr {$numcommits - 1}] + } + + set rl1 [expr {$row - $downarrowlen - 3}] + if {$rl1 < 0} { + set rl1 0 + } + set ro1 [expr {$row - 3}] + if {$ro1 < 0} { + set ro1 0 + } + set r2 [expr {$endrow + $uparrowlen + 3}] + if {$r2 > $numcommits} { + set r2 $numcommits + } + for {set r $rl1} {$r < $r2} {incr r} { + if {[lindex $rowidlist $r] ne {} && [lindex $rowfinal $r]} { + if {$rl1 < $r} { + layoutrows $rl1 $r + } + set rl1 [expr {$r + 1}] + } + } + if {$rl1 < $r} { + layoutrows $rl1 $r + } + optimize_rows $ro1 0 $r2 + if {$need_redisplay || $nrows_drawn > 2000} { + clear_display + } + + # make the lines join to already-drawn rows either side + set r [expr {$row - 1}] + if {$r < 0 || ![info exists iddrawn([lindex $displayorder $r])]} { + set r $row + } + set er [expr {$endrow + 1}] + if {$er >= $numcommits || + ![info exists iddrawn([lindex $displayorder $er])]} { + set er $endrow + } + for {} {$r <= $er} {incr r} { + set id [lindex $displayorder $r] + set wasdrawn [info exists iddrawn($id)] + drawcmitrow $r + if {$r == $er} break + set nextid [lindex $displayorder [expr {$r + 1}]] + if {$wasdrawn && [info exists iddrawn($nextid)]} continue + drawparentlinks $id $r + + set rowids [lindex $rowidlist $r] + foreach lid $rowids { + if {$lid eq {}} continue + if {[info exists lineend($lid)] && $lineend($lid) > $r} continue + if {$lid eq $id} { + # see if this is the first child of any of its parents + foreach p [lindex $parentlist $r] { + if {[lsearch -exact $rowids $p] < 0} { + # make this line extend up to the child + set lineend($p) [drawlineseg $p $r $er 0] + } + } + } else { + set lineend($lid) [drawlineseg $lid $r $er 1] + } + } + } +} + +proc undolayout {row} { + global uparrowlen mingaplen downarrowlen + global rowidlist rowisopt rowfinal need_redisplay + + set r [expr {$row - ($uparrowlen + $mingaplen + $downarrowlen)}] + if {$r < 0} { + set r 0 + } + if {[llength $rowidlist] > $r} { + incr r -1 + set rowidlist [lrange $rowidlist 0 $r] + set rowfinal [lrange $rowfinal 0 $r] + set rowisopt [lrange $rowisopt 0 $r] + set need_redisplay 1 + run drawvisible + } +} + +proc drawvisible {} { + global canv linespc curview vrowmod selectedline targetrow targetid + global need_redisplay cscroll numcommits + + set fs [$canv yview] + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax eq {} || $ymax == 0 || $numcommits == 0} return + set f0 [lindex $fs 0] + set f1 [lindex $fs 1] + set y0 [expr {int($f0 * $ymax)}] + set y1 [expr {int($f1 * $ymax)}] + + if {[info exists targetid]} { + if {[commitinview $targetid $curview]} { + set r [rowofcommit $targetid] + if {$r != $targetrow} { + # Fix up the scrollregion and change the scrolling position + # now that our target row has moved. 
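# (targetid/targetrow anchor the visible area on a commit id rather than on a
# row number, so the view stays put when rows are renumbered as more commits
# arrive; the code below shifts the scroll fractions by the distance the
# target row has moved and then redraws.)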
+ set diff [expr {($r - $targetrow) * $linespc}] + set targetrow $r + setcanvscroll + set ymax [lindex [$canv cget -scrollregion] 3] + incr y0 $diff + incr y1 $diff + set f0 [expr {$y0 / $ymax}] + set f1 [expr {$y1 / $ymax}] + allcanvs yview moveto $f0 + $cscroll set $f0 $f1 + set need_redisplay 1 + } + } else { + unset targetid + } + } + + set row [expr {int(($y0 - 3) / $linespc) - 1}] + set endrow [expr {int(($y1 - 3) / $linespc) + 1}] + if {$endrow >= $vrowmod($curview)} { + update_arcrows $curview + } + if {$selectedline ne {} && + $row <= $selectedline && $selectedline <= $endrow} { + set targetrow $selectedline + } elseif {[info exists targetid]} { + set targetrow [expr {int(($row + $endrow) / 2)}] + } + if {[info exists targetrow]} { + if {$targetrow >= $numcommits} { + set targetrow [expr {$numcommits - 1}] + } + set targetid [commitonrow $targetrow] + } + drawcommits $row $endrow +} + +proc clear_display {} { + global iddrawn linesegs need_redisplay nrows_drawn + global vhighlights fhighlights nhighlights rhighlights + global linehtag linentag linedtag boldids boldnameids + + allcanvs delete all + catch {unset iddrawn} + catch {unset linesegs} + catch {unset linehtag} + catch {unset linentag} + catch {unset linedtag} + set boldids {} + set boldnameids {} + catch {unset vhighlights} + catch {unset fhighlights} + catch {unset nhighlights} + catch {unset rhighlights} + set need_redisplay 0 + set nrows_drawn 0 +} + +proc findcrossings {id} { + global rowidlist parentlist numcommits displayorder + + set cross {} + set ccross {} + foreach {s e} [rowranges $id] { + if {$e >= $numcommits} { + set e [expr {$numcommits - 1}] + } + if {$e <= $s} continue + for {set row $e} {[incr row -1] >= $s} {} { + set x [lsearch -exact [lindex $rowidlist $row] $id] + if {$x < 0} break + set olds [lindex $parentlist $row] + set kid [lindex $displayorder $row] + set kidx [lsearch -exact [lindex $rowidlist $row] $kid] + if {$kidx < 0} continue + set nextrow [lindex $rowidlist [expr {$row + 1}]] + foreach p $olds { + set px [lsearch -exact $nextrow $p] + if {$px < 0} continue + if {($kidx < $x && $x < $px) || ($px < $x && $x < $kidx)} { + if {[lsearch -exact $ccross $p] >= 0} continue + if {$x == $px + ($kidx < $px? 
-1: 1)} { + lappend ccross $p + } elseif {[lsearch -exact $cross $p] < 0} { + lappend cross $p + } + } + } + } + } + return [concat $ccross {{}} $cross] +} + +proc assigncolor {id} { + global colormap colors nextcolor + global parents children children curview + + if {[info exists colormap($id)]} return + set ncolors [llength $colors] + if {[info exists children($curview,$id)]} { + set kids $children($curview,$id) + } else { + set kids {} + } + if {[llength $kids] == 1} { + set child [lindex $kids 0] + if {[info exists colormap($child)] + && [llength $parents($curview,$child)] == 1} { + set colormap($id) $colormap($child) + return + } + } + set badcolors {} + set origbad {} + foreach x [findcrossings $id] { + if {$x eq {}} { + # delimiter between corner crossings and other crossings + if {[llength $badcolors] >= $ncolors - 1} break + set origbad $badcolors + } + if {[info exists colormap($x)] + && [lsearch -exact $badcolors $colormap($x)] < 0} { + lappend badcolors $colormap($x) + } + } + if {[llength $badcolors] >= $ncolors} { + set badcolors $origbad + } + set origbad $badcolors + if {[llength $badcolors] < $ncolors - 1} { + foreach child $kids { + if {[info exists colormap($child)] + && [lsearch -exact $badcolors $colormap($child)] < 0} { + lappend badcolors $colormap($child) + } + foreach p $parents($curview,$child) { + if {[info exists colormap($p)] + && [lsearch -exact $badcolors $colormap($p)] < 0} { + lappend badcolors $colormap($p) + } + } + } + if {[llength $badcolors] >= $ncolors} { + set badcolors $origbad + } + } + for {set i 0} {$i <= $ncolors} {incr i} { + set c [lindex $colors $nextcolor] + if {[incr nextcolor] >= $ncolors} { + set nextcolor 0 + } + if {[lsearch -exact $badcolors $c]} break + } + set colormap($id) $c +} + +proc bindline {t id} { + global canv + + $canv bind $t "lineenter %x %y $id" + $canv bind $t "linemotion %x %y $id" + $canv bind $t "lineleave $id" + $canv bind $t "lineclick %x %y $id 1" +} + +proc drawtags {id x xt y1} { + global idtags idheads idotherrefs mainhead + global linespc lthickness + global canv rowtextx curview fgcolor bgcolor ctxbut + + set marks {} + set ntags 0 + set nheads 0 + if {[info exists idtags($id)]} { + set marks $idtags($id) + set ntags [llength $marks] + } + if {[info exists idheads($id)]} { + set marks [concat $marks $idheads($id)] + set nheads [llength $idheads($id)] + } + if {[info exists idotherrefs($id)]} { + set marks [concat $marks $idotherrefs($id)] + } + if {$marks eq {}} { + return $xt + } + + set delta [expr {int(0.5 * ($linespc - $lthickness))}] + set yt [expr {$y1 - 0.5 * $linespc}] + set yb [expr {$yt + $linespc - 1}] + set xvals {} + set wvals {} + set i -1 + foreach tag $marks { + incr i + if {$i >= $ntags && $i < $ntags + $nheads && $tag eq $mainhead} { + set wid [font measure mainfontbold $tag] + } else { + set wid [font measure mainfont $tag] + } + lappend xvals $xt + lappend wvals $wid + set xt [expr {$xt + $delta + $wid + $lthickness + $linespc}] + } + set t [$canv create line $x $y1 [lindex $xvals end] $y1 \ + -width $lthickness -fill black -tags tag.$id] + $canv lower $t + foreach tag $marks x $xvals wid $wvals { + set tag_quoted [string map {% %%} $tag] + set xl [expr {$x + $delta}] + set xr [expr {$x + $delta + $wid + $lthickness}] + set font mainfont + if {[incr ntags -1] >= 0} { + # draw a tag + set t [$canv create polygon $x [expr {$yt + $delta}] $xl $yt \ + $xr $yt $xr $yb $xl $yb $x [expr {$yb - $delta}] \ + -width 1 -outline black -fill yellow -tags tag.$id] + $canv bind $t <1> [list showtag 
$tag_quoted 1] + set rowtextx([rowofcommit $id]) [expr {$xr + $linespc}] + } else { + # draw a head or other ref + if {[incr nheads -1] >= 0} { + set col green + if {$tag eq $mainhead} { + set font mainfontbold + } + } else { + set col "#ddddff" + } + set xl [expr {$xl - $delta/2}] + $canv create polygon $x $yt $xr $yt $xr $yb $x $yb \ + -width 1 -outline black -fill $col -tags tag.$id + if {[regexp {^(remotes/.*/|remotes/)} $tag match remoteprefix]} { + set rwid [font measure mainfont $remoteprefix] + set xi [expr {$x + 1}] + set yti [expr {$yt + 1}] + set xri [expr {$x + $rwid}] + $canv create polygon $xi $yti $xri $yti $xri $yb $xi $yb \ + -width 0 -fill "#ffddaa" -tags tag.$id + } + } + set t [$canv create text $xl $y1 -anchor w -text $tag -fill $fgcolor \ + -font $font -tags [list tag.$id text]] + if {$ntags >= 0} { + $canv bind $t <1> [list showtag $tag_quoted 1] + } elseif {$nheads >= 0} { + $canv bind $t $ctxbut [list headmenu %X %Y $id $tag_quoted] + } + } + return $xt +} + +proc xcoord {i level ln} { + global canvx0 xspc1 xspc2 + + set x [expr {$canvx0 + $i * $xspc1($ln)}] + if {$i > 0 && $i == $level} { + set x [expr {$x + 0.5 * ($xspc2 - $xspc1($ln))}] + } elseif {$i > $level} { + set x [expr {$x + $xspc2 - $xspc1($ln)}] + } + return $x +} + +proc show_status {msg} { + global canv fgcolor + + clear_display + $canv create text 3 3 -anchor nw -text $msg -font mainfont \ + -tags text -fill $fgcolor +} + +# Don't change the text pane cursor if it is currently the hand cursor, +# showing that we are over a sha1 ID link. +proc settextcursor {c} { + global ctext curtextcursor + + if {[$ctext cget -cursor] == $curtextcursor} { + $ctext config -cursor $c + } + set curtextcursor $c +} + +proc nowbusy {what {name {}}} { + global isbusy busyname statusw + + if {[array names isbusy] eq {}} { + . config -cursor watch + settextcursor watch + } + set isbusy($what) 1 + set busyname($what) $name + if {$name ne {}} { + $statusw conf -text $name + } +} + +proc notbusy {what} { + global isbusy maincursor textcursor busyname statusw + + catch { + unset isbusy($what) + if {$busyname($what) ne {} && + [$statusw cget -text] eq $busyname($what)} { + $statusw conf -text {} + } + } + if {[array names isbusy] eq {}} { + . config -cursor $maincursor + settextcursor $textcursor + } +} + +proc findmatches {f} { + global findtype findstring + if {$findtype == [mc "Regexp"]} { + set matches [regexp -indices -all -inline $findstring $f] + } else { + set fs $findstring + if {$findtype == [mc "IgnCase"]} { + set f [string tolower $f] + set fs [string tolower $fs] + } + set matches {} + set i 0 + set l [string length $fs] + while {[set j [string first $fs $f $i]] >= 0} { + lappend matches [list $j [expr {$j+$l-1}]] + set i [expr {$j + $l}] + } + } + return $matches +} + +proc dofind {{dirn 1} {wrap 1}} { + global findstring findstartline findcurline selectedline numcommits + global gdttype filehighlight fh_serial find_dirn findallowwrap + + if {[info exists find_dirn]} { + if {$find_dirn == $dirn} return + stopfinding + } + focus . 
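# findmatches above returns a list of first/last character index pairs for
# every hit; for instance, with a Regexp find string of "fix":
#   regexp -indices -all -inline {fix} "prefix suffix"  => {3 5} {10 12}
# markmatches later converts those offsets into pixel x-coordinates with
# "font measure" to place the yellow highlight rectangles.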
+ if {$findstring eq {} || $numcommits == 0} return + if {$selectedline eq {}} { + set findstartline [lindex [visiblerows] [expr {$dirn < 0}]] + } else { + set findstartline $selectedline + } + set findcurline $findstartline + nowbusy finding [mc "Searching"] + if {$gdttype ne [mc "containing:"] && ![info exists filehighlight]} { + after cancel do_file_hl $fh_serial + do_file_hl $fh_serial + } + set find_dirn $dirn + set findallowwrap $wrap + run findmore +} + +proc stopfinding {} { + global find_dirn findcurline fprogcoord + + if {[info exists find_dirn]} { + unset find_dirn + unset findcurline + notbusy finding + set fprogcoord 0 + adjustprogress + } + stopblaming +} + +proc findmore {} { + global commitdata commitinfo numcommits findpattern findloc + global findstartline findcurline findallowwrap + global find_dirn gdttype fhighlights fprogcoord + global curview varcorder vrownum varccommits vrowmod + + if {![info exists find_dirn]} { + return 0 + } + set fldtypes [list [mc "Headline"] [mc "Author"] [mc "Date"] [mc "Committer"] [mc "CDate"] [mc "Comments"]] + set l $findcurline + set moretodo 0 + if {$find_dirn > 0} { + incr l + if {$l >= $numcommits} { + set l 0 + } + if {$l <= $findstartline} { + set lim [expr {$findstartline + 1}] + } else { + set lim $numcommits + set moretodo $findallowwrap + } + } else { + if {$l == 0} { + set l $numcommits + } + incr l -1 + if {$l >= $findstartline} { + set lim [expr {$findstartline - 1}] + } else { + set lim -1 + set moretodo $findallowwrap + } + } + set n [expr {($lim - $l) * $find_dirn}] + if {$n > 500} { + set n 500 + set moretodo 1 + } + if {$l + ($find_dirn > 0? $n: 1) > $vrowmod($curview)} { + update_arcrows $curview + } + set found 0 + set domore 1 + set ai [bsearch $vrownum($curview) $l] + set a [lindex $varcorder($curview) $ai] + set arow [lindex $vrownum($curview) $ai] + set ids [lindex $varccommits($curview,$a)] + set arowend [expr {$arow + [llength $ids]}] + if {$gdttype eq [mc "containing:"]} { + for {} {$n > 0} {incr n -1; incr l $find_dirn} { + if {$l < $arow || $l >= $arowend} { + incr ai $find_dirn + set a [lindex $varcorder($curview) $ai] + set arow [lindex $vrownum($curview) $ai] + set ids [lindex $varccommits($curview,$a)] + set arowend [expr {$arow + [llength $ids]}] + } + set id [lindex $ids [expr {$l - $arow}]] + # shouldn't happen unless git log doesn't give all the commits... 
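# (findmore is rescheduled through run/filerun and examines no more than 500
# rows per invocation, updating fprogcoord in between, so searching a long
# history never blocks the UI for long.)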
+ if {![info exists commitdata($id)] || + ![doesmatch $commitdata($id)]} { + continue + } + if {![info exists commitinfo($id)]} { + getcommit $id + } + set info $commitinfo($id) + foreach f $info ty $fldtypes { + if {($findloc eq [mc "All fields"] || $findloc eq $ty) && + [doesmatch $f]} { + set found 1 + break + } + } + if {$found} break + } + } else { + for {} {$n > 0} {incr n -1; incr l $find_dirn} { + if {$l < $arow || $l >= $arowend} { + incr ai $find_dirn + set a [lindex $varcorder($curview) $ai] + set arow [lindex $vrownum($curview) $ai] + set ids [lindex $varccommits($curview,$a)] + set arowend [expr {$arow + [llength $ids]}] + } + set id [lindex $ids [expr {$l - $arow}]] + if {![info exists fhighlights($id)]} { + # this sets fhighlights($id) to -1 + askfilehighlight $l $id + } + if {$fhighlights($id) > 0} { + set found $domore + break + } + if {$fhighlights($id) < 0} { + if {$domore} { + set domore 0 + set findcurline [expr {$l - $find_dirn}] + } + } + } + } + if {$found || ($domore && !$moretodo)} { + unset findcurline + unset find_dirn + notbusy finding + set fprogcoord 0 + adjustprogress + if {$found} { + findselectline $l + } else { + bell + } + return 0 + } + if {!$domore} { + flushhighlights + } else { + set findcurline [expr {$l - $find_dirn}] + } + set n [expr {($findcurline - $findstartline) * $find_dirn - 1}] + if {$n < 0} { + incr n $numcommits + } + set fprogcoord [expr {$n * 1.0 / $numcommits}] + adjustprogress + return $domore +} + +proc findselectline {l} { + global findloc commentend ctext findcurline markingmatches gdttype + + set markingmatches [expr {$gdttype eq [mc "containing:"]}] + set findcurline $l + selectline $l 1 + if {$markingmatches && + ($findloc eq [mc "All fields"] || $findloc eq [mc "Comments"])} { + # highlight the matches in the comments + set f [$ctext get 1.0 $commentend] + set matches [findmatches $f] + foreach match $matches { + set start [lindex $match 0] + set end [expr {[lindex $match 1] + 1}] + $ctext tag add found "1.0 + $start c" "1.0 + $end c" + } + } + drawvisible +} + +# mark the bits of a headline or author that match a find string +proc markmatches {canv l str tag matches font row} { + global selectedline + + set bbox [$canv bbox $tag] + set x0 [lindex $bbox 0] + set y0 [lindex $bbox 1] + set y1 [lindex $bbox 3] + foreach match $matches { + set start [lindex $match 0] + set end [lindex $match 1] + if {$start > $end} continue + set xoff [font measure $font [string range $str 0 [expr {$start-1}]]] + set xlen [font measure $font [string range $str 0 [expr {$end}]]] + set t [$canv create rect [expr {$x0+$xoff}] $y0 \ + [expr {$x0+$xlen+2}] $y1 \ + -outline {} -tags [list match$l matches] -fill yellow] + $canv lower $t + if {$row == $selectedline} { + $canv raise $t secsel + } + } +} + +proc unmarkmatches {} { + global markingmatches + + allcanvs delete matches + set markingmatches 0 + stopfinding +} + +proc selcanvline {w x y} { + global canv canvy0 ctext linespc + global rowtextx + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax == {}} return + set yfrac [lindex [$canv yview] 0] + set y [expr {$y + $yfrac * $ymax}] + set l [expr {int(($y - $canvy0) / $linespc + 0.5)}] + if {$l < 0} { + set l 0 + } + if {$w eq $canv} { + set xmax [lindex [$canv cget -scrollregion] 2] + set xleft [expr {[lindex [$canv xview] 0] * $xmax}] + if {![info exists rowtextx($l)] || $xleft + $x < $rowtextx($l)} return + } + unmarkmatches + selectline $l 1 +} + +proc commit_descriptor {p} { + global commitinfo + if {![info exists commitinfo($p)]} { + 
getcommit $p + } + set l "..." + if {[llength $commitinfo($p)] > 1} { + set l [lindex $commitinfo($p) 0] + } + return "$p ($l)\n" +} + +# append some text to the ctext widget, and make any SHA1 ID +# that we know about be a clickable link. +proc appendwithlinks {text tags} { + global ctext linknum curview + + set start [$ctext index "end - 1c"] + $ctext insert end $text $tags + set links [regexp -indices -all -inline {\m[0-9a-f]{6,40}\M} $text] + foreach l $links { + set s [lindex $l 0] + set e [lindex $l 1] + set linkid [string range $text $s $e] + incr e + $ctext tag delete link$linknum + $ctext tag add link$linknum "$start + $s c" "$start + $e c" + setlink $linkid link$linknum + incr linknum + } +} + +proc setlink {id lk} { + global curview ctext pendinglinks + + set known 0 + if {[string length $id] < 40} { + set matches [longid $id] + if {[llength $matches] > 0} { + if {[llength $matches] > 1} return + set known 1 + set id [lindex $matches 0] + } + } else { + set known [commitinview $id $curview] + } + if {$known} { + $ctext tag conf $lk -foreground blue -underline 1 + $ctext tag bind $lk <1> [list selbyid $id] + $ctext tag bind $lk {linkcursor %W 1} + $ctext tag bind $lk {linkcursor %W -1} + } else { + lappend pendinglinks($id) $lk + interestedin $id {makelink %P} + } +} + +proc appendshortlink {id {pre {}} {post {}}} { + global ctext linknum + + $ctext insert end $pre + $ctext tag delete link$linknum + $ctext insert end [string range $id 0 7] link$linknum + $ctext insert end $post + setlink $id link$linknum + incr linknum +} + +proc makelink {id} { + global pendinglinks + + if {![info exists pendinglinks($id)]} return + foreach lk $pendinglinks($id) { + setlink $id $lk + } + unset pendinglinks($id) +} + +proc linkcursor {w inc} { + global linkentercount curtextcursor + + if {[incr linkentercount $inc] > 0} { + $w configure -cursor hand2 + } else { + $w configure -cursor $curtextcursor + if {$linkentercount < 0} { + set linkentercount 0 + } + } +} + +proc viewnextline {dir} { + global canv linespc + + $canv delete hover + set ymax [lindex [$canv cget -scrollregion] 3] + set wnow [$canv yview] + set wtop [expr {[lindex $wnow 0] * $ymax}] + set newtop [expr {$wtop + $dir * $linespc}] + if {$newtop < 0} { + set newtop 0 + } elseif {$newtop > $ymax} { + set newtop $ymax + } + allcanvs yview moveto [expr {$newtop * 1.0 / $ymax}] +} + +# add a list of tag or branch names at position pos +# returns the number of names inserted +proc appendrefs {pos ids var} { + global ctext linknum curview $var maxrefs + + if {[catch {$ctext index $pos}]} { + return 0 + } + $ctext conf -state normal + $ctext delete $pos "$pos lineend" + set tags {} + foreach id $ids { + foreach tag [set $var\($id\)] { + lappend tags [list $tag $id] + } + } + if {[llength $tags] > $maxrefs} { + $ctext insert $pos "[mc "many"] ([llength $tags])" + } else { + set tags [lsort -index 0 -decreasing $tags] + set sep {} + foreach ti $tags { + set id [lindex $ti 1] + set lk link$linknum + incr linknum + $ctext tag delete $lk + $ctext insert $pos $sep + $ctext insert $pos [lindex $ti 0] $lk + setlink $id $lk + set sep ", " + } + } + $ctext conf -state disabled + return [llength $tags] +} + +# called when we have finished computing the nearby tags +proc dispneartags {delay} { + global selectedline currentid showneartags tagphase + + if {$selectedline eq {} || !$showneartags} return + after cancel dispnexttag + if {$delay} { + after 200 dispnexttag + set tagphase -1 + } else { + after idle dispnexttag + set tagphase 0 + } +} + +proc 
dispnexttag {} { + global selectedline currentid showneartags tagphase ctext + + if {$selectedline eq {} || !$showneartags} return + switch -- $tagphase { + 0 { + set dtags [desctags $currentid] + if {$dtags ne {}} { + appendrefs precedes $dtags idtags + } + } + 1 { + set atags [anctags $currentid] + if {$atags ne {}} { + appendrefs follows $atags idtags + } + } + 2 { + set dheads [descheads $currentid] + if {$dheads ne {}} { + if {[appendrefs branch $dheads idheads] > 1 + && [$ctext get "branch -3c"] eq "h"} { + # turn "Branch" into "Branches" + $ctext conf -state normal + $ctext insert "branch -2c" "es" + $ctext conf -state disabled + } + } + } + } + if {[incr tagphase] <= 2} { + after idle dispnexttag + } +} + +proc make_secsel {id} { + global linehtag linentag linedtag canv canv2 canv3 + + if {![info exists linehtag($id)]} return + $canv delete secsel + set t [eval $canv create rect [$canv bbox $linehtag($id)] -outline {{}} \ + -tags secsel -fill [$canv cget -selectbackground]] + $canv lower $t + $canv2 delete secsel + set t [eval $canv2 create rect [$canv2 bbox $linentag($id)] -outline {{}} \ + -tags secsel -fill [$canv2 cget -selectbackground]] + $canv2 lower $t + $canv3 delete secsel + set t [eval $canv3 create rect [$canv3 bbox $linedtag($id)] -outline {{}} \ + -tags secsel -fill [$canv3 cget -selectbackground]] + $canv3 lower $t +} + +proc make_idmark {id} { + global linehtag canv fgcolor + + if {![info exists linehtag($id)]} return + $canv delete markid + set t [eval $canv create rect [$canv bbox $linehtag($id)] \ + -tags markid -outline $fgcolor] + $canv raise $t +} + +proc selectline {l isnew {desired_loc {}}} { + global canv ctext commitinfo selectedline + global canvy0 linespc parents children curview + global currentid sha1entry + global commentend idtags linknum + global mergemax numcommits pending_select + global cmitmode showneartags allcommits + global targetrow targetid lastscrollrows + global autoselect autosellen jump_to_here + + catch {unset pending_select} + $canv delete hover + normalline + unsel_reflist + stopfinding + if {$l < 0 || $l >= $numcommits} return + set id [commitonrow $l] + set targetid $id + set targetrow $l + set selectedline $l + set currentid $id + if {$lastscrollrows < $numcommits} { + setcanvscroll + } + + set y [expr {$canvy0 + $l * $linespc}] + set ymax [lindex [$canv cget -scrollregion] 3] + set ytop [expr {$y - $linespc - 1}] + set ybot [expr {$y + $linespc + 1}] + set wnow [$canv yview] + set wtop [expr {[lindex $wnow 0] * $ymax}] + set wbot [expr {[lindex $wnow 1] * $ymax}] + set wh [expr {$wbot - $wtop}] + set newtop $wtop + if {$ytop < $wtop} { + if {$ybot < $wtop} { + set newtop [expr {$y - $wh / 2.0}] + } else { + set newtop $ytop + if {$newtop > $wtop - $linespc} { + set newtop [expr {$wtop - $linespc}] + } + } + } elseif {$ybot > $wbot} { + if {$ytop > $wbot} { + set newtop [expr {$y - $wh / 2.0}] + } else { + set newtop [expr {$ybot - $wh}] + if {$newtop < $wtop + $linespc} { + set newtop [expr {$wtop + $linespc}] + } + } + } + if {$newtop != $wtop} { + if {$newtop < 0} { + set newtop 0 + } + allcanvs yview moveto [expr {$newtop * 1.0 / $ymax}] + drawvisible + } + + make_secsel $id + + if {$isnew} { + addtohistory [list selbyid $id 0] savecmitpos + } + + $sha1entry delete 0 end + $sha1entry insert 0 $id + if {$autoselect} { + $sha1entry selection range 0 $autosellen + } + rhighlight_sel $id + + $ctext conf -state normal + clear_ctext + set linknum 0 + if {![info exists commitinfo($id)]} { + getcommit $id + } + set info 
$commitinfo($id) + set date [formatdate [lindex $info 2]] + $ctext insert end "[mc "Author"]: [lindex $info 1] $date\n" + set date [formatdate [lindex $info 4]] + $ctext insert end "[mc "Committer"]: [lindex $info 3] $date\n" + if {[info exists idtags($id)]} { + $ctext insert end [mc "Tags:"] + foreach tag $idtags($id) { + $ctext insert end " $tag" + } + $ctext insert end "\n" + } + + set headers {} + set olds $parents($curview,$id) + if {[llength $olds] > 1} { + set np 0 + foreach p $olds { + if {$np >= $mergemax} { + set tag mmax + } else { + set tag m$np + } + $ctext insert end "[mc "Parent"]: " $tag + appendwithlinks [commit_descriptor $p] {} + incr np + } + } else { + foreach p $olds { + append headers "[mc "Parent"]: [commit_descriptor $p]" + } + } + + foreach c $children($curview,$id) { + append headers "[mc "Child"]: [commit_descriptor $c]" + } + + # make anything that looks like a SHA1 ID be a clickable link + appendwithlinks $headers {} + if {$showneartags} { + if {![info exists allcommits]} { + getallcommits + } + $ctext insert end "[mc "Branch"]: " + $ctext mark set branch "end -1c" + $ctext mark gravity branch left + $ctext insert end "\n[mc "Follows"]: " + $ctext mark set follows "end -1c" + $ctext mark gravity follows left + $ctext insert end "\n[mc "Precedes"]: " + $ctext mark set precedes "end -1c" + $ctext mark gravity precedes left + $ctext insert end "\n" + dispneartags 1 + } + $ctext insert end "\n" + set comment [lindex $info 5] + if {[string first "\r" $comment] >= 0} { + set comment [string map {"\r" "\n "} $comment] + } + appendwithlinks $comment {comment} + + $ctext tag remove found 1.0 end + $ctext conf -state disabled + set commentend [$ctext index "end - 1c"] + + set jump_to_here $desired_loc + init_flist [mc "Comments"] + if {$cmitmode eq "tree"} { + gettree $id + } elseif {[llength $olds] <= 1} { + startdiff $id + } else { + mergediff $id + } +} + +proc selfirstline {} { + unmarkmatches + selectline 0 1 +} + +proc sellastline {} { + global numcommits + unmarkmatches + set l [expr {$numcommits - 1}] + selectline $l 1 +} + +proc selnextline {dir} { + global selectedline + focus . 
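# (The "Branch:", "Follows:" and "Precedes:" lines above are created empty,
# with left-gravity text marks remembering where they sit; once the nearby
# tag computation finishes, dispnexttag fills them in asynchronously through
# appendrefs at those marks.)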
+ if {$selectedline eq {}} return + set l [expr {$selectedline + $dir}] + unmarkmatches + selectline $l 1 +} + +proc selnextpage {dir} { + global canv linespc selectedline numcommits + + set lpp [expr {([winfo height $canv] - 2) / $linespc}] + if {$lpp < 1} { + set lpp 1 + } + allcanvs yview scroll [expr {$dir * $lpp}] units + drawvisible + if {$selectedline eq {}} return + set l [expr {$selectedline + $dir * $lpp}] + if {$l < 0} { + set l 0 + } elseif {$l >= $numcommits} { + set l [expr $numcommits - 1] + } + unmarkmatches + selectline $l 1 +} + +proc unselectline {} { + global selectedline currentid + + set selectedline {} + catch {unset currentid} + allcanvs delete secsel + rhighlight_none +} + +proc reselectline {} { + global selectedline + + if {$selectedline ne {}} { + selectline $selectedline 0 + } +} + +proc addtohistory {cmd {saveproc {}}} { + global history historyindex curview + + unset_posvars + save_position + set elt [list $curview $cmd $saveproc {}] + if {$historyindex > 0 + && [lindex $history [expr {$historyindex - 1}]] == $elt} { + return + } + + if {$historyindex < [llength $history]} { + set history [lreplace $history $historyindex end $elt] + } else { + lappend history $elt + } + incr historyindex + if {$historyindex > 1} { + .tf.bar.leftbut conf -state normal + } else { + .tf.bar.leftbut conf -state disabled + } + .tf.bar.rightbut conf -state disabled +} + +# save the scrolling position of the diff display pane +proc save_position {} { + global historyindex history + + if {$historyindex < 1} return + set hi [expr {$historyindex - 1}] + set fn [lindex $history $hi 2] + if {$fn ne {}} { + lset history $hi 3 [eval $fn] + } +} + +proc unset_posvars {} { + global last_posvars + + if {[info exists last_posvars]} { + foreach {var val} $last_posvars { + global $var + catch {unset $var} + } + unset last_posvars + } +} + +proc godo {elt} { + global curview last_posvars + + set view [lindex $elt 0] + set cmd [lindex $elt 1] + set pv [lindex $elt 3] + if {$curview != $view} { + showview $view + } + unset_posvars + foreach {var val} $pv { + global $var + set $var $val + } + set last_posvars $pv + eval $cmd +} + +proc goback {} { + global history historyindex + focus . + + if {$historyindex > 1} { + save_position + incr historyindex -1 + godo [lindex $history [expr {$historyindex - 1}]] + .tf.bar.rightbut conf -state normal + } + if {$historyindex <= 1} { + .tf.bar.leftbut conf -state disabled + } +} + +proc goforw {} { + global history historyindex + focus . 
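# (Each history element built by addtohistory is {view cmd saveproc posvars}:
# goback/goforw first call save_position, which stores the diff pane's scroll
# state into slot 3 via the element's saveproc, then godo switches view if
# needed, restores those position variables and re-runs cmd.)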
+ + if {$historyindex < [llength $history]} { + save_position + set cmd [lindex $history $historyindex] + incr historyindex + godo $cmd + .tf.bar.leftbut conf -state normal + } + if {$historyindex >= [llength $history]} { + .tf.bar.rightbut conf -state disabled + } +} + +proc gettree {id} { + global treefilelist treeidlist diffids diffmergeid treepending + global nullid nullid2 + + set diffids $id + catch {unset diffmergeid} + if {![info exists treefilelist($id)]} { + if {![info exists treepending]} { + if {$id eq $nullid} { + set cmd [list | git ls-files] + } elseif {$id eq $nullid2} { + set cmd [list | git ls-files --stage -t] + } else { + set cmd [list | git ls-tree -r $id] + } + if {[catch {set gtf [open $cmd r]}]} { + return + } + set treepending $id + set treefilelist($id) {} + set treeidlist($id) {} + fconfigure $gtf -blocking 0 -encoding binary + filerun $gtf [list gettreeline $gtf $id] + } + } else { + setfilelist $id + } +} + +proc gettreeline {gtf id} { + global treefilelist treeidlist treepending cmitmode diffids nullid nullid2 + + set nl 0 + while {[incr nl] <= 1000 && [gets $gtf line] >= 0} { + if {$diffids eq $nullid} { + set fname $line + } else { + set i [string first "\t" $line] + if {$i < 0} continue + set fname [string range $line [expr {$i+1}] end] + set line [string range $line 0 [expr {$i-1}]] + if {$diffids ne $nullid2 && [lindex $line 1] ne "blob"} continue + set sha1 [lindex $line 2] + lappend treeidlist($id) $sha1 + } + if {[string index $fname 0] eq "\""} { + set fname [lindex $fname 0] + } + set fname [encoding convertfrom $fname] + lappend treefilelist($id) $fname + } + if {![eof $gtf]} { + return [expr {$nl >= 1000? 2: 1}] + } + close $gtf + unset treepending + if {$cmitmode ne "tree"} { + if {![info exists diffmergeid]} { + gettreediffs $diffids + } + } elseif {$id ne $diffids} { + gettree $diffids + } else { + setfilelist $id + } + return 0 +} + +proc showfile {f} { + global treefilelist treeidlist diffids nullid nullid2 + global ctext_file_names ctext_file_lines + global ctext commentend + + set i [lsearch -exact $treefilelist($diffids) $f] + if {$i < 0} { + puts "oops, $f not in list for id $diffids" + return + } + if {$diffids eq $nullid} { + if {[catch {set bf [open $f r]} err]} { + puts "oops, can't read $f: $err" + return + } + } else { + set blob [lindex $treeidlist($diffids) $i] + if {[catch {set bf [open [concat | git cat-file blob $blob] r]} err]} { + puts "oops, error reading blob $blob: $err" + return + } + } + fconfigure $bf -blocking 0 -encoding [get_path_encoding $f] + filerun $bf [list getblobline $bf $diffids] + $ctext config -state normal + clear_ctext $commentend + lappend ctext_file_names $f + lappend ctext_file_lines [lindex [split $commentend "."] 0] + $ctext insert end "\n" + $ctext insert end "$f\n" filesep + $ctext config -state disabled + $ctext yview $commentend + settabs 0 +} + +proc getblobline {bf id} { + global diffids cmitmode ctext + + if {$id ne $diffids || $cmitmode ne "tree"} { + catch {close $bf} + return 0 + } + $ctext config -state normal + set nl 0 + while {[incr nl] <= 1000 && [gets $bf line] >= 0} { + $ctext insert end "$line\n" + } + if {[eof $bf]} { + global jump_to_here ctext_file_names commentend + + # delete last newline + $ctext delete "end - 2c" "end - 1c" + close $bf + if {$jump_to_here ne {} && + [lindex $jump_to_here 0] eq [lindex $ctext_file_names 0]} { + set lnum [expr {[lindex $jump_to_here 1] + + [lindex [split $commentend .] 
0]}] + mark_ctext_line $lnum + } + $ctext config -state disabled + return 0 + } + $ctext config -state disabled + return [expr {$nl >= 1000? 2: 1}] +} + +proc mark_ctext_line {lnum} { + global ctext markbgcolor + + $ctext tag delete omark + $ctext tag add omark $lnum.0 "$lnum.0 + 1 line" + $ctext tag conf omark -background $markbgcolor + $ctext see $lnum.0 +} + +proc mergediff {id} { + global diffmergeid + global diffids treediffs + global parents curview + + set diffmergeid $id + set diffids $id + set treediffs($id) {} + set np [llength $parents($curview,$id)] + settabs $np + getblobdiffs $id +} + +proc startdiff {ids} { + global treediffs diffids treepending diffmergeid nullid nullid2 + + settabs 1 + set diffids $ids + catch {unset diffmergeid} + if {![info exists treediffs($ids)] || + [lsearch -exact $ids $nullid] >= 0 || + [lsearch -exact $ids $nullid2] >= 0} { + if {![info exists treepending]} { + gettreediffs $ids + } + } else { + addtocflist $ids + } +} + +proc path_filter {filter name} { + foreach p $filter { + set l [string length $p] + if {[string index $p end] eq "/"} { + if {[string compare -length $l $p $name] == 0} { + return 1 + } + } else { + if {[string compare -length $l $p $name] == 0 && + ([string length $name] == $l || + [string index $name $l] eq "/")} { + return 1 + } + } + } + return 0 +} + +proc addtocflist {ids} { + global treediffs + + add_flist $treediffs($ids) + getblobdiffs $ids +} + +proc diffcmd {ids flags} { + global nullid nullid2 + + set i [lsearch -exact $ids $nullid] + set j [lsearch -exact $ids $nullid2] + if {$i >= 0} { + if {[llength $ids] > 1 && $j < 0} { + # comparing working directory with some specific revision + set cmd [concat | git diff-index $flags] + if {$i == 0} { + lappend cmd -R [lindex $ids 1] + } else { + lappend cmd [lindex $ids 0] + } + } else { + # comparing working directory with index + set cmd [concat | git diff-files $flags] + if {$j == 1} { + lappend cmd -R + } + } + } elseif {$j >= 0} { + set cmd [concat | git diff-index --cached $flags] + if {[llength $ids] > 1} { + # comparing index with specific revision + if {$j == 0} { + lappend cmd -R [lindex $ids 1] + } else { + lappend cmd [lindex $ids 0] + } + } else { + # comparing index with HEAD + lappend cmd HEAD + } + } else { + set cmd [concat | git diff-tree -r $flags $ids] + } + return $cmd +} + +proc gettreediffs {ids} { + global treediff treepending + + if {[catch {set gdtf [open [diffcmd $ids {--no-commit-id}] r]}]} return + + set treepending $ids + set treediff {} + fconfigure $gdtf -blocking 0 -encoding binary + filerun $gdtf [list gettreediffline $gdtf $ids] +} + +proc gettreediffline {gdtf ids} { + global treediff treediffs treepending diffids diffmergeid + global cmitmode vfilelimit curview limitdiffs perfile_attrs + + set nr 0 + set sublist {} + set max 1000 + if {$perfile_attrs} { + # cache_gitattr is slow, and even slower on win32 where we + # have to invoke it for only about 30 paths at a time + set max 500 + if {[tk windowingsystem] == "win32"} { + set max 120 + } + } + while {[incr nr] <= $max && [gets $gdtf line] >= 0} { + set i [string first "\t" $line] + if {$i >= 0} { + set file [string range $line [expr {$i+1}] end] + if {[string index $file 0] eq "\""} { + set file [lindex $file 0] + } + set file [encoding convertfrom $file] + if {$file ne [lindex $treediff end]} { + lappend treediff $file + lappend sublist $file + } + } + } + if {$perfile_attrs} { + cache_gitattr encoding $sublist + } + if {![eof $gdtf]} { + return [expr {$nr >= $max? 
2: 1}] + } + close $gdtf + if {$limitdiffs && $vfilelimit($curview) ne {}} { + set flist {} + foreach f $treediff { + if {[path_filter $vfilelimit($curview) $f]} { + lappend flist $f + } + } + set treediffs($ids) $flist + } else { + set treediffs($ids) $treediff + } + unset treepending + if {$cmitmode eq "tree" && [llength $diffids] == 1} { + gettree $diffids + } elseif {$ids != $diffids} { + if {![info exists diffmergeid]} { + gettreediffs $diffids + } + } else { + addtocflist $ids + } + return 0 +} + +# empty string or positive integer +proc diffcontextvalidate {v} { + return [regexp {^(|[1-9][0-9]*)$} $v] +} + +proc diffcontextchange {n1 n2 op} { + global diffcontextstring diffcontext + + if {[string is integer -strict $diffcontextstring]} { + if {$diffcontextstring >= 0} { + set diffcontext $diffcontextstring + reselectline + } + } +} + +proc changeignorespace {} { + reselectline +} + +proc changeworddiff {name ix op} { + reselectline +} + +proc getblobdiffs {ids} { + global blobdifffd diffids env + global diffinhdr treediffs + global diffcontext + global ignorespace + global worddiff + global limitdiffs vfilelimit curview + global diffencoding targetline diffnparents + global git_version currdiffsubmod + + set textconv {} + if {[package vcompare $git_version "1.6.1"] >= 0} { + set textconv "--textconv" + } + set submodule {} + if {[package vcompare $git_version "1.6.6"] >= 0} { + set submodule "--submodule" + } + set cmd [diffcmd $ids "-p $textconv $submodule -C --cc --no-commit-id -U$diffcontext"] + if {$ignorespace} { + append cmd " -w" + } + if {$worddiff ne [mc "Line diff"]} { + append cmd " --word-diff=porcelain" + } + if {$limitdiffs && $vfilelimit($curview) ne {}} { + set cmd [concat $cmd -- $vfilelimit($curview)] + } + if {[catch {set bdf [open $cmd r]} err]} { + error_popup [mc "Error getting diffs: %s" $err] + return + } + set targetline {} + set diffnparents 0 + set diffinhdr 0 + set diffencoding [get_path_encoding {}] + fconfigure $bdf -blocking 0 -encoding binary -eofchar {} + set blobdifffd($ids) $bdf + set currdiffsubmod "" + filerun $bdf [list getblobdiffline $bdf $diffids] +} + +proc savecmitpos {} { + global ctext cmitmode + + if {$cmitmode eq "tree"} { + return {} + } + return [list target_scrollpos [$ctext index @0,0]] +} + +proc savectextpos {} { + global ctext + + return [list target_scrollpos [$ctext index @0,0]] +} + +proc maybe_scroll_ctext {ateof} { + global ctext target_scrollpos + + if {![info exists target_scrollpos]} return + if {!$ateof} { + set nlines [expr {[winfo height $ctext] + / [font metrics textfont -linespace]}] + if {[$ctext compare "$target_scrollpos + $nlines lines" <= end]} return + } + $ctext yview $target_scrollpos + unset target_scrollpos +} + +proc setinlist {var i val} { + global $var + + while {[llength [set $var]] < $i} { + lappend $var {} + } + if {[llength [set $var]] == $i} { + lappend $var $val + } else { + lset $var $i $val + } +} + +proc makediffhdr {fname ids} { + global ctext curdiffstart treediffs diffencoding + global ctext_file_names jump_to_here targetline diffline + + set fname [encoding convertfrom $fname] + set diffencoding [get_path_encoding $fname] + set i [lsearch -exact $treediffs($ids) $fname] + if {$i >= 0} { + setinlist difffilestart $i $curdiffstart + } + lset ctext_file_names end $fname + set l [expr {(78 - [string length $fname]) / 2}] + set pad [string range "----------------------------------------" 1 $l] + $ctext insert $curdiffstart "$pad $fname $pad" filesep + set targetline {} + if {$jump_to_here ne {} 
&& [lindex $jump_to_here 0] eq $fname} { + set targetline [lindex $jump_to_here 1] + } + set diffline 0 +} + +proc getblobdiffline {bdf ids} { + global diffids blobdifffd ctext curdiffstart + global diffnexthead diffnextnote difffilestart + global ctext_file_names ctext_file_lines + global diffinhdr treediffs mergemax diffnparents + global diffencoding jump_to_here targetline diffline currdiffsubmod + global worddiff + + set nr 0 + $ctext conf -state normal + while {[incr nr] <= 1000 && [gets $bdf line] >= 0} { + if {$ids != $diffids || $bdf != $blobdifffd($ids)} { + catch {close $bdf} + return 0 + } + if {![string compare -length 5 "diff " $line]} { + if {![regexp {^diff (--cc|--git) } $line m type]} { + set line [encoding convertfrom $line] + $ctext insert end "$line\n" hunksep + continue + } + # start of a new file + set diffinhdr 1 + $ctext insert end "\n" + set curdiffstart [$ctext index "end - 1c"] + lappend ctext_file_names "" + lappend ctext_file_lines [lindex [split $curdiffstart "."] 0] + $ctext insert end "\n" filesep + + if {$type eq "--cc"} { + # start of a new file in a merge diff + set fname [string range $line 10 end] + if {[lsearch -exact $treediffs($ids) $fname] < 0} { + lappend treediffs($ids) $fname + add_flist [list $fname] + } + + } else { + set line [string range $line 11 end] + # If the name hasn't changed the length will be odd, + # the middle char will be a space, and the two bits either + # side will be a/name and b/name, or "a/name" and "b/name". + # If the name has changed we'll get "rename from" and + # "rename to" or "copy from" and "copy to" lines following + # this, and we'll use them to get the filenames. + # This complexity is necessary because spaces in the + # filename(s) don't get escaped. + set l [string length $line] + set i [expr {$l / 2}] + if {!(($l & 1) && [string index $line $i] eq " " && + [string range $line 2 [expr {$i - 1}]] eq \ + [string range $line [expr {$i + 3}] end])} { + continue + } + # unescape if quoted and chop off the a/ from the front + if {[string index $line 0] eq "\""} { + set fname [string range [lindex $line 0] 2 end] + } else { + set fname [string range $line 2 [expr {$i - 1}]] + } + } + makediffhdr $fname $ids + + } elseif {![string compare -length 16 "* Unmerged path " $line]} { + set fname [encoding convertfrom [string range $line 16 end]] + $ctext insert end "\n" + set curdiffstart [$ctext index "end - 1c"] + lappend ctext_file_names $fname + lappend ctext_file_lines [lindex [split $curdiffstart "."] 0] + $ctext insert end "$line\n" filesep + set i [lsearch -exact $treediffs($ids) $fname] + if {$i >= 0} { + setinlist difffilestart $i $curdiffstart + } + + } elseif {![string compare -length 2 "@@" $line]} { + regexp {^@@+} $line ats + set line [encoding convertfrom $diffencoding $line] + $ctext insert end "$line\n" hunksep + if {[regexp { \+(\d+),\d+ @@} $line m nl]} { + set diffline $nl + } + set diffnparents [expr {[string length $ats] - 1}] + set diffinhdr 0 + + } elseif {![string compare -length 10 "Submodule " $line]} { + # start of a new submodule + if {[regexp -indices "\[0-9a-f\]+\\.\\." 
$line nameend]} { + set fname [string range $line 10 [expr [lindex $nameend 0] - 2]] + } else { + set fname [string range $line 10 [expr [string first "contains " $line] - 2]] + } + if {$currdiffsubmod != $fname} { + $ctext insert end "\n"; # Add newline after commit message + } + set curdiffstart [$ctext index "end - 1c"] + lappend ctext_file_names "" + if {$currdiffsubmod != $fname} { + lappend ctext_file_lines $fname + makediffhdr $fname $ids + set currdiffsubmod $fname + $ctext insert end "\n$line\n" filesep + } else { + $ctext insert end "$line\n" filesep + } + } elseif {![string compare -length 3 " >" $line]} { + set $currdiffsubmod "" + set line [encoding convertfrom $diffencoding $line] + $ctext insert end "$line\n" dresult + } elseif {![string compare -length 3 " <" $line]} { + set $currdiffsubmod "" + set line [encoding convertfrom $diffencoding $line] + $ctext insert end "$line\n" d0 + } elseif {$diffinhdr} { + if {![string compare -length 12 "rename from " $line]} { + set fname [string range $line [expr 6 + [string first " from " $line] ] end] + if {[string index $fname 0] eq "\""} { + set fname [lindex $fname 0] + } + set fname [encoding convertfrom $fname] + set i [lsearch -exact $treediffs($ids) $fname] + if {$i >= 0} { + setinlist difffilestart $i $curdiffstart + } + } elseif {![string compare -length 10 $line "rename to "] || + ![string compare -length 8 $line "copy to "]} { + set fname [string range $line [expr 4 + [string first " to " $line] ] end] + if {[string index $fname 0] eq "\""} { + set fname [lindex $fname 0] + } + makediffhdr $fname $ids + } elseif {[string compare -length 3 $line "---"] == 0} { + # do nothing + continue + } elseif {[string compare -length 3 $line "+++"] == 0} { + set diffinhdr 0 + continue + } + $ctext insert end "$line\n" filesep + + } else { + set line [string map {\x1A ^Z} \ + [encoding convertfrom $diffencoding $line]] + # parse the prefix - one ' ', '-' or '+' for each parent + set prefix [string range $line 0 [expr {$diffnparents - 1}]] + set tag [expr {$diffnparents > 1? 
"m": "d"}] + set dowords [expr {$worddiff ne [mc "Line diff"] && $diffnparents == 1}] + set words_pre_markup "" + set words_post_markup "" + if {[string trim $prefix " -+"] eq {}} { + # prefix only has " ", "-" and "+" in it: normal diff line + set num [string first "-" $prefix] + if {$dowords} { + set line [string range $line 1 end] + } + if {$num >= 0} { + # removed line, first parent with line is $num + if {$num >= $mergemax} { + set num "max" + } + if {$dowords && $worddiff eq [mc "Markup words"]} { + $ctext insert end "\[-$line-\]" $tag$num + } else { + $ctext insert end "$line" $tag$num + } + if {!$dowords} { + $ctext insert end "\n" $tag$num + } + } else { + set tags {} + if {[string first "+" $prefix] >= 0} { + # added line + lappend tags ${tag}result + if {$diffnparents > 1} { + set num [string first " " $prefix] + if {$num >= 0} { + if {$num >= $mergemax} { + set num "max" + } + lappend tags m$num + } + } + set words_pre_markup "{+" + set words_post_markup "+}" + } + if {$targetline ne {}} { + if {$diffline == $targetline} { + set seehere [$ctext index "end - 1 chars"] + set targetline {} + } else { + incr diffline + } + } + if {$dowords && $worddiff eq [mc "Markup words"]} { + $ctext insert end "$words_pre_markup$line$words_post_markup" $tags + } else { + $ctext insert end "$line" $tags + } + if {!$dowords} { + $ctext insert end "\n" $tags + } + } + } elseif {$dowords && $prefix eq "~"} { + $ctext insert end "\n" {} + } else { + # "\ No newline at end of file", + # or something else we don't recognize + $ctext insert end "$line\n" hunksep + } + } + } + if {[info exists seehere]} { + mark_ctext_line [lindex [split $seehere .] 0] + } + maybe_scroll_ctext [eof $bdf] + $ctext conf -state disabled + if {[eof $bdf]} { + catch {close $bdf} + return 0 + } + return [expr {$nr >= 1000? 2: 1}] +} + +proc changediffdisp {} { + global ctext diffelide + + $ctext tag conf d0 -elide [lindex $diffelide 0] + $ctext tag conf dresult -elide [lindex $diffelide 1] +} + +proc highlightfile {loc cline} { + global ctext cflist cflist_top + + $ctext yview $loc + $cflist tag remove highlight $cflist_top.0 "$cflist_top.0 lineend" + $cflist tag add highlight $cline.0 "$cline.0 lineend" + $cflist see $cline.0 + set cflist_top $cline +} + +proc prevfile {} { + global difffilestart ctext cmitmode + + if {$cmitmode eq "tree"} return + set prev 0.0 + set prevline 1 + set here [$ctext index @0,0] + foreach loc $difffilestart { + if {[$ctext compare $loc >= $here]} { + highlightfile $prev $prevline + return + } + set prev $loc + incr prevline + } + highlightfile $prev $prevline +} + +proc nextfile {} { + global difffilestart ctext cmitmode + + if {$cmitmode eq "tree"} return + set here [$ctext index @0,0] + set line 1 + foreach loc $difffilestart { + incr line + if {[$ctext compare $loc > $here]} { + highlightfile $loc $line + return + } + } +} + +proc clear_ctext {{first 1.0}} { + global ctext smarktop smarkbot + global ctext_file_names ctext_file_lines + global pendinglinks + + set l [lindex [split $first .] 
0] + if {![info exists smarktop] || [$ctext compare $first < $smarktop.0]} { + set smarktop $l + } + if {![info exists smarkbot] || [$ctext compare $first < $smarkbot.0]} { + set smarkbot $l + } + $ctext delete $first end + if {$first eq "1.0"} { + catch {unset pendinglinks} + } + set ctext_file_names {} + set ctext_file_lines {} +} + +proc settabs {{firstab {}}} { + global firsttabstop tabstop ctext have_tk85 + + if {$firstab ne {} && $have_tk85} { + set firsttabstop $firstab + } + set w [font measure textfont "0"] + if {$firsttabstop != 0} { + $ctext conf -tabs [list [expr {($firsttabstop + $tabstop) * $w}] \ + [expr {($firsttabstop + 2 * $tabstop) * $w}]] + } elseif {$have_tk85 || $tabstop != 8} { + $ctext conf -tabs [expr {$tabstop * $w}] + } else { + $ctext conf -tabs {} + } +} + +proc incrsearch {name ix op} { + global ctext searchstring searchdirn + + $ctext tag remove found 1.0 end + if {[catch {$ctext index anchor}]} { + # no anchor set, use start of selection, or of visible area + set sel [$ctext tag ranges sel] + if {$sel ne {}} { + $ctext mark set anchor [lindex $sel 0] + } elseif {$searchdirn eq "-forwards"} { + $ctext mark set anchor @0,0 + } else { + $ctext mark set anchor @0,[winfo height $ctext] + } + } + if {$searchstring ne {}} { + set here [$ctext search $searchdirn -- $searchstring anchor] + if {$here ne {}} { + $ctext see $here + } + searchmarkvisible 1 + } +} + +proc dosearch {} { + global sstring ctext searchstring searchdirn + + focus $sstring + $sstring icursor end + set searchdirn -forwards + if {$searchstring ne {}} { + set sel [$ctext tag ranges sel] + if {$sel ne {}} { + set start "[lindex $sel 0] + 1c" + } elseif {[catch {set start [$ctext index anchor]}]} { + set start "@0,0" + } + set match [$ctext search -count mlen -- $searchstring $start] + $ctext tag remove sel 1.0 end + if {$match eq {}} { + bell + return + } + $ctext see $match + set mend "$match + $mlen c" + $ctext tag add sel $match $mend + $ctext mark unset anchor + } +} + +proc dosearchback {} { + global sstring ctext searchstring searchdirn + + focus $sstring + $sstring icursor end + set searchdirn -backwards + if {$searchstring ne {}} { + set sel [$ctext tag ranges sel] + if {$sel ne {}} { + set start [lindex $sel 0] + } elseif {[catch {set start [$ctext index anchor]}]} { + set start @0,[winfo height $ctext] + } + set match [$ctext search -backwards -count ml -- $searchstring $start] + $ctext tag remove sel 1.0 end + if {$match eq {}} { + bell + return + } + $ctext see $match + set mend "$match + $ml c" + $ctext tag add sel $match $mend + $ctext mark unset anchor + } +} + +proc searchmark {first last} { + global ctext searchstring + + set mend $first.0 + while {1} { + set match [$ctext search -count mlen -- $searchstring $mend $last.end] + if {$match eq {}} break + set mend "$match + $mlen c" + $ctext tag add found $match $mend + } +} + +proc searchmarkvisible {doall} { + global ctext smarktop smarkbot + + set topline [lindex [split [$ctext index @0,0] .] 0] + set botline [lindex [split [$ctext index @0,[winfo height $ctext]] .] 
0] + if {$doall || $botline < $smarktop || $topline > $smarkbot} { + # no overlap with previous + searchmark $topline $botline + set smarktop $topline + set smarkbot $botline + } else { + if {$topline < $smarktop} { + searchmark $topline [expr {$smarktop-1}] + set smarktop $topline + } + if {$botline > $smarkbot} { + searchmark [expr {$smarkbot+1}] $botline + set smarkbot $botline + } + } +} + +proc scrolltext {f0 f1} { + global searchstring + + .bleft.bottom.sb set $f0 $f1 + if {$searchstring ne {}} { + searchmarkvisible 0 + } +} + +proc setcoords {} { + global linespc charspc canvx0 canvy0 + global xspc1 xspc2 lthickness + + set linespc [font metrics mainfont -linespace] + set charspc [font measure mainfont "m"] + set canvy0 [expr {int(3 + 0.5 * $linespc)}] + set canvx0 [expr {int(3 + 0.5 * $linespc)}] + set lthickness [expr {int($linespc / 9) + 1}] + set xspc1(0) $linespc + set xspc2 $linespc +} + +proc redisplay {} { + global canv + global selectedline + + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax eq {} || $ymax == 0} return + set span [$canv yview] + clear_display + setcanvscroll + allcanvs yview moveto [lindex $span 0] + drawvisible + if {$selectedline ne {}} { + selectline $selectedline 0 + allcanvs yview moveto [lindex $span 0] + } +} + +proc parsefont {f n} { + global fontattr + + set fontattr($f,family) [lindex $n 0] + set s [lindex $n 1] + if {$s eq {} || $s == 0} { + set s 10 + } elseif {$s < 0} { + set s [expr {int(-$s / [winfo fpixels . 1p] + 0.5)}] + } + set fontattr($f,size) $s + set fontattr($f,weight) normal + set fontattr($f,slant) roman + foreach style [lrange $n 2 end] { + switch -- $style { + "normal" - + "bold" {set fontattr($f,weight) $style} + "roman" - + "italic" {set fontattr($f,slant) $style} + } + } +} + +proc fontflags {f {isbold 0}} { + global fontattr + + return [list -family $fontattr($f,family) -size $fontattr($f,size) \ + -weight [expr {$isbold? 
"bold": $fontattr($f,weight)}] \ + -slant $fontattr($f,slant)] +} + +proc fontname {f} { + global fontattr + + set n [list $fontattr($f,family) $fontattr($f,size)] + if {$fontattr($f,weight) eq "bold"} { + lappend n "bold" + } + if {$fontattr($f,slant) eq "italic"} { + lappend n "italic" + } + return $n +} + +proc incrfont {inc} { + global mainfont textfont ctext canv cflist showrefstop + global stopped entries fontattr + + unmarkmatches + set s $fontattr(mainfont,size) + incr s $inc + if {$s < 1} { + set s 1 + } + set fontattr(mainfont,size) $s + font config mainfont -size $s + font config mainfontbold -size $s + set mainfont [fontname mainfont] + set s $fontattr(textfont,size) + incr s $inc + if {$s < 1} { + set s 1 + } + set fontattr(textfont,size) $s + font config textfont -size $s + font config textfontbold -size $s + set textfont [fontname textfont] + setcoords + settabs + redisplay +} + +proc clearsha1 {} { + global sha1entry sha1string + if {[string length $sha1string] == 40} { + $sha1entry delete 0 end + } +} + +proc sha1change {n1 n2 op} { + global sha1string currentid sha1but + if {$sha1string == {} + || ([info exists currentid] && $sha1string == $currentid)} { + set state disabled + } else { + set state normal + } + if {[$sha1but cget -state] == $state} return + if {$state == "normal"} { + $sha1but conf -state normal -relief raised -text "[mc "Goto:"] " + } else { + $sha1but conf -state disabled -relief flat -text "[mc "SHA1 ID:"] " + } +} + +proc gotocommit {} { + global sha1string tagids headids curview varcid + + if {$sha1string == {} + || ([info exists currentid] && $sha1string == $currentid)} return + if {[info exists tagids($sha1string)]} { + set id $tagids($sha1string) + } elseif {[info exists headids($sha1string)]} { + set id $headids($sha1string) + } else { + set id [string tolower $sha1string] + if {[regexp {^[0-9a-f]{4,39}$} $id]} { + set matches [longid $id] + if {$matches ne {}} { + if {[llength $matches] > 1} { + error_popup [mc "Short SHA1 id %s is ambiguous" $id] + return + } + set id [lindex $matches 0] + } + } else { + if {[catch {set id [exec git rev-parse --verify $sha1string]}]} { + error_popup [mc "Revision %s is not known" $sha1string] + return + } + } + } + if {[commitinview $id $curview]} { + selectline [rowofcommit $id] 1 + return + } + if {[regexp {^[0-9a-fA-F]{4,}$} $sha1string]} { + set msg [mc "SHA1 id %s is not known" $sha1string] + } else { + set msg [mc "Revision %s is not in the current view" $sha1string] + } + error_popup $msg +} + +proc lineenter {x y id} { + global hoverx hovery hoverid hovertimer + global commitinfo canv + + if {![info exists commitinfo($id)] && ![getcommit $id]} return + set hoverx $x + set hovery $y + set hoverid $id + if {[info exists hovertimer]} { + after cancel $hovertimer + } + set hovertimer [after 500 linehover] + $canv delete hover +} + +proc linemotion {x y id} { + global hoverx hovery hoverid hovertimer + + if {[info exists hoverid] && $id == $hoverid} { + set hoverx $x + set hovery $y + if {[info exists hovertimer]} { + after cancel $hovertimer + } + set hovertimer [after 500 linehover] + } +} + +proc lineleave {id} { + global hoverid hovertimer canv + + if {[info exists hoverid] && $id == $hoverid} { + $canv delete hover + if {[info exists hovertimer]} { + after cancel $hovertimer + unset hovertimer + } + unset hoverid + } +} + +proc linehover {} { + global hoverx hovery hoverid hovertimer + global canv linespc lthickness + global commitinfo + + set text [lindex $commitinfo($hoverid) 0] + set ymax [lindex 
[$canv cget -scrollregion] 3] + if {$ymax == {}} return + set yfrac [lindex [$canv yview] 0] + set x [expr {$hoverx + 2 * $linespc}] + set y [expr {$hovery + $yfrac * $ymax - $linespc / 2}] + set x0 [expr {$x - 2 * $lthickness}] + set y0 [expr {$y - 2 * $lthickness}] + set x1 [expr {$x + [font measure mainfont $text] + 2 * $lthickness}] + set y1 [expr {$y + $linespc + 2 * $lthickness}] + set t [$canv create rectangle $x0 $y0 $x1 $y1 \ + -fill \#ffff80 -outline black -width 1 -tags hover] + $canv raise $t + set t [$canv create text $x $y -anchor nw -text $text -tags hover \ + -font mainfont] + $canv raise $t +} + +proc clickisonarrow {id y} { + global lthickness + + set ranges [rowranges $id] + set thresh [expr {2 * $lthickness + 6}] + set n [expr {[llength $ranges] - 1}] + for {set i 1} {$i < $n} {incr i} { + set row [lindex $ranges $i] + if {abs([yc $row] - $y) < $thresh} { + return $i + } + } + return {} +} + +proc arrowjump {id n y} { + global canv + + # 1 <-> 2, 3 <-> 4, etc... + set n [expr {(($n - 1) ^ 1) + 1}] + set row [lindex [rowranges $id] $n] + set yt [yc $row] + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax eq {} || $ymax <= 0} return + set view [$canv yview] + set yspan [expr {[lindex $view 1] - [lindex $view 0]}] + set yfrac [expr {$yt / $ymax - $yspan / 2}] + if {$yfrac < 0} { + set yfrac 0 + } + allcanvs yview moveto $yfrac +} + +proc lineclick {x y id isnew} { + global ctext commitinfo children canv thickerline curview + + if {![info exists commitinfo($id)] && ![getcommit $id]} return + unmarkmatches + unselectline + normalline + $canv delete hover + # draw this line thicker than normal + set thickerline $id + drawlines $id + if {$isnew} { + set ymax [lindex [$canv cget -scrollregion] 3] + if {$ymax eq {}} return + set yfrac [lindex [$canv yview] 0] + set y [expr {$y + $yfrac * $ymax}] + } + set dirn [clickisonarrow $id $y] + if {$dirn ne {}} { + arrowjump $id $dirn $y + return + } + + if {$isnew} { + addtohistory [list lineclick $x $y $id 0] savectextpos + } + # fill the details pane with info about this line + $ctext conf -state normal + clear_ctext + settabs 0 + $ctext insert end "[mc "Parent"]:\t" + $ctext insert end $id link0 + setlink $id link0 + set info $commitinfo($id) + $ctext insert end "\n\t[lindex $info 0]\n" + $ctext insert end "\t[mc "Author"]:\t[lindex $info 1]\n" + set date [formatdate [lindex $info 2]] + $ctext insert end "\t[mc "Date"]:\t$date\n" + set kids $children($curview,$id) + if {$kids ne {}} { + $ctext insert end "\n[mc "Children"]:" + set i 0 + foreach child $kids { + incr i + if {![info exists commitinfo($child)] && ![getcommit $child]} continue + set info $commitinfo($child) + $ctext insert end "\n\t" + $ctext insert end $child link$i + setlink $child link$i + $ctext insert end "\n\t[lindex $info 0]" + $ctext insert end "\n\t[mc "Author"]:\t[lindex $info 1]" + set date [formatdate [lindex $info 2]] + $ctext insert end "\n\t[mc "Date"]:\t$date\n" + } + } + maybe_scroll_ctext 1 + $ctext conf -state disabled + init_flist {} +} + +proc normalline {} { + global thickerline + if {[info exists thickerline]} { + set id $thickerline + unset thickerline + drawlines $id + } +} + +proc selbyid {id {isnew 1}} { + global curview + if {[commitinview $id $curview]} { + selectline [rowofcommit $id] $isnew + } +} + +proc mstime {} { + global startmstime + if {![info exists startmstime]} { + set startmstime [clock clicks -milliseconds] + } + return [format "%.3f" [expr {([clock click -milliseconds] - $startmstime) / 1000.0}]] +} + +proc rowmenu {x 
y id} { + global rowctxmenu selectedline rowmenuid curview + global nullid nullid2 fakerowmenu mainhead markedid + + stopfinding + set rowmenuid $id + if {$selectedline eq {} || [rowofcommit $id] eq $selectedline} { + set state disabled + } else { + set state normal + } + if {$id ne $nullid && $id ne $nullid2} { + set menu $rowctxmenu + if {$mainhead ne {}} { + $menu entryconfigure 7 -label [mc "Reset %s branch to here" $mainhead] -state normal + } else { + $menu entryconfigure 7 -label [mc "Detached head: can't reset" $mainhead] -state disabled + } + if {[info exists markedid] && $markedid ne $id} { + $menu entryconfigure 9 -state normal + $menu entryconfigure 10 -state normal + $menu entryconfigure 11 -state normal + } else { + $menu entryconfigure 9 -state disabled + $menu entryconfigure 10 -state disabled + $menu entryconfigure 11 -state disabled + } + } else { + set menu $fakerowmenu + } + $menu entryconfigure [mca "Diff this -> selected"] -state $state + $menu entryconfigure [mca "Diff selected -> this"] -state $state + $menu entryconfigure [mca "Make patch"] -state $state + tk_popup $menu $x $y +} + +proc markhere {} { + global rowmenuid markedid canv + + set markedid $rowmenuid + make_idmark $markedid +} + +proc gotomark {} { + global markedid + + if {[info exists markedid]} { + selbyid $markedid + } +} + +proc replace_by_kids {l r} { + global curview children + + set id [commitonrow $r] + set l [lreplace $l 0 0] + foreach kid $children($curview,$id) { + lappend l [rowofcommit $kid] + } + return [lsort -integer -decreasing -unique $l] +} + +proc find_common_desc {} { + global markedid rowmenuid curview children + + if {![info exists markedid]} return + if {![commitinview $markedid $curview] || + ![commitinview $rowmenuid $curview]} return + #set t1 [clock clicks -milliseconds] + set l1 [list [rowofcommit $markedid]] + set l2 [list [rowofcommit $rowmenuid]] + while 1 { + set r1 [lindex $l1 0] + set r2 [lindex $l2 0] + if {$r1 eq {} || $r2 eq {}} break + if {$r1 == $r2} { + selectline $r1 1 + break + } + if {$r1 > $r2} { + set l1 [replace_by_kids $l1 $r1] + } else { + set l2 [replace_by_kids $l2 $r2] + } + } + #set t2 [clock clicks -milliseconds] + #puts "took [expr {$t2-$t1}]ms" +} + +proc compare_commits {} { + global markedid rowmenuid curview children + + if {![info exists markedid]} return + if {![commitinview $markedid $curview]} return + addtohistory [list do_cmp_commits $markedid $rowmenuid] + do_cmp_commits $markedid $rowmenuid +} + +proc getpatchid {id} { + global patchids + + if {![info exists patchids($id)]} { + set cmd [diffcmd [list $id] {-p --root}] + # trim off the initial "|" + set cmd [lrange $cmd 1 end] + if {[catch { + set x [eval exec $cmd | git patch-id] + set patchids($id) [lindex $x 0] + }]} { + set patchids($id) "error" + } + } + return $patchids($id) +} + +proc do_cmp_commits {a b} { + global ctext curview parents children patchids commitinfo + + $ctext conf -state normal + clear_ctext + init_flist {} + for {set i 0} {$i < 100} {incr i} { + set skipa 0 + set skipb 0 + if {[llength $parents($curview,$a)] > 1} { + appendshortlink $a [mc "Skipping merge commit "] "\n" + set skipa 1 + } else { + set patcha [getpatchid $a] + } + if {[llength $parents($curview,$b)] > 1} { + appendshortlink $b [mc "Skipping merge commit "] "\n" + set skipb 1 + } else { + set patchb [getpatchid $b] + } + if {!$skipa && !$skipb} { + set heada [lindex $commitinfo($a) 0] + set headb [lindex $commitinfo($b) 0] + if {$patcha eq "error"} { + appendshortlink $a [mc "Error getting patch ID 
for "] \ + [mc " - stopping\n"] + break + } + if {$patchb eq "error"} { + appendshortlink $b [mc "Error getting patch ID for "] \ + [mc " - stopping\n"] + break + } + if {$patcha eq $patchb} { + if {$heada eq $headb} { + appendshortlink $a [mc "Commit "] + appendshortlink $b " == " " $heada\n" + } else { + appendshortlink $a [mc "Commit "] " $heada\n" + appendshortlink $b [mc " is the same patch as\n "] \ + " $headb\n" + } + set skipa 1 + set skipb 1 + } else { + $ctext insert end "\n" + appendshortlink $a [mc "Commit "] " $heada\n" + appendshortlink $b [mc " differs from\n "] \ + " $headb\n" + $ctext insert end [mc "Diff of commits:\n\n"] + $ctext conf -state disabled + update + diffcommits $a $b + return + } + } + if {$skipa} { + set kids [real_children $curview,$a] + if {[llength $kids] != 1} { + $ctext insert end "\n" + appendshortlink $a [mc "Commit "] \ + [mc " has %s children - stopping\n" [llength $kids]] + break + } + set a [lindex $kids 0] + } + if {$skipb} { + set kids [real_children $curview,$b] + if {[llength $kids] != 1} { + appendshortlink $b [mc "Commit "] \ + [mc " has %s children - stopping\n" [llength $kids]] + break + } + set b [lindex $kids 0] + } + } + $ctext conf -state disabled +} + +proc diffcommits {a b} { + global diffcontext diffids blobdifffd diffinhdr currdiffsubmod + + set tmpdir [gitknewtmpdir] + set fna [file join $tmpdir "commit-[string range $a 0 7]"] + set fnb [file join $tmpdir "commit-[string range $b 0 7]"] + if {[catch { + exec git diff-tree -p --pretty $a >$fna + exec git diff-tree -p --pretty $b >$fnb + } err]} { + error_popup [mc "Error writing commit to file: %s" $err] + return + } + if {[catch { + set fd [open "| diff -U$diffcontext $fna $fnb" r] + } err]} { + error_popup [mc "Error diffing commits: %s" $err] + return + } + set diffids [list commits $a $b] + set blobdifffd($diffids) $fd + set diffinhdr 0 + set currdiffsubmod "" + filerun $fd [list getblobdiffline $fd $diffids] +} + +proc diffvssel {dirn} { + global rowmenuid selectedline + + if {$selectedline eq {}} return + if {$dirn} { + set oldid [commitonrow $selectedline] + set newid $rowmenuid + } else { + set oldid $rowmenuid + set newid [commitonrow $selectedline] + } + addtohistory [list doseldiff $oldid $newid] savectextpos + doseldiff $oldid $newid +} + +proc doseldiff {oldid newid} { + global ctext + global commitinfo + + $ctext conf -state normal + clear_ctext + init_flist [mc "Top"] + $ctext insert end "[mc "From"] " + $ctext insert end $oldid link0 + setlink $oldid link0 + $ctext insert end "\n " + $ctext insert end [lindex $commitinfo($oldid) 0] + $ctext insert end "\n\n[mc "To"] " + $ctext insert end $newid link1 + setlink $newid link1 + $ctext insert end "\n " + $ctext insert end [lindex $commitinfo($newid) 0] + $ctext insert end "\n" + $ctext conf -state disabled + $ctext tag remove found 1.0 end + startdiff [list $oldid $newid] +} + +proc mkpatch {} { + global rowmenuid currentid commitinfo patchtop patchnum NS + + if {![info exists currentid]} return + set oldid $currentid + set oldhead [lindex $commitinfo($oldid) 0] + set newid $rowmenuid + set newhead [lindex $commitinfo($newid) 0] + set top .patch + set patchtop $top + catch {destroy $top} + ttk_toplevel $top + make_transient $top . 
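+    # dialog layout: read-only from/to commit ids and headlines, an output
+    # file name, and Generate/Cancel buttons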
+ ${NS}::label $top.title -text [mc "Generate patch"] + grid $top.title - -pady 10 + ${NS}::label $top.from -text [mc "From:"] + ${NS}::entry $top.fromsha1 -width 40 + $top.fromsha1 insert 0 $oldid + $top.fromsha1 conf -state readonly + grid $top.from $top.fromsha1 -sticky w + ${NS}::entry $top.fromhead -width 60 + $top.fromhead insert 0 $oldhead + $top.fromhead conf -state readonly + grid x $top.fromhead -sticky w + ${NS}::label $top.to -text [mc "To:"] + ${NS}::entry $top.tosha1 -width 40 + $top.tosha1 insert 0 $newid + $top.tosha1 conf -state readonly + grid $top.to $top.tosha1 -sticky w + ${NS}::entry $top.tohead -width 60 + $top.tohead insert 0 $newhead + $top.tohead conf -state readonly + grid x $top.tohead -sticky w + ${NS}::button $top.rev -text [mc "Reverse"] -command mkpatchrev + grid $top.rev x -pady 10 -padx 5 + ${NS}::label $top.flab -text [mc "Output file:"] + ${NS}::entry $top.fname -width 60 + $top.fname insert 0 [file normalize "patch$patchnum.patch"] + incr patchnum + grid $top.flab $top.fname -sticky w + ${NS}::frame $top.buts + ${NS}::button $top.buts.gen -text [mc "Generate"] -command mkpatchgo + ${NS}::button $top.buts.can -text [mc "Cancel"] -command mkpatchcan + bind $top mkpatchgo + bind $top mkpatchcan + grid $top.buts.gen $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + grid $top.buts - -pady 10 -sticky ew + focus $top.fname +} + +proc mkpatchrev {} { + global patchtop + + set oldid [$patchtop.fromsha1 get] + set oldhead [$patchtop.fromhead get] + set newid [$patchtop.tosha1 get] + set newhead [$patchtop.tohead get] + foreach e [list fromsha1 fromhead tosha1 tohead] \ + v [list $newid $newhead $oldid $oldhead] { + $patchtop.$e conf -state normal + $patchtop.$e delete 0 end + $patchtop.$e insert 0 $v + $patchtop.$e conf -state readonly + } +} + +proc mkpatchgo {} { + global patchtop nullid nullid2 + + set oldid [$patchtop.fromsha1 get] + set newid [$patchtop.tosha1 get] + set fname [$patchtop.fname get] + set cmd [diffcmd [list $oldid $newid] -p] + # trim off the initial "|" + set cmd [lrange $cmd 1 end] + lappend cmd >$fname & + if {[catch {eval exec $cmd} err]} { + error_popup "[mc "Error creating patch:"] $err" $patchtop + } + catch {destroy $patchtop} + unset patchtop +} + +proc mkpatchcan {} { + global patchtop + + catch {destroy $patchtop} + unset patchtop +} + +proc mktag {} { + global rowmenuid mktagtop commitinfo NS + + set top .maketag + set mktagtop $top + catch {destroy $top} + ttk_toplevel $top + make_transient $top . 
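+    # dialog layout: read-only commit id and headline, a tag name, and an
+    # optional tag message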
+ ${NS}::label $top.title -text [mc "Create tag"] + grid $top.title - -pady 10 + ${NS}::label $top.id -text [mc "ID:"] + ${NS}::entry $top.sha1 -width 40 + $top.sha1 insert 0 $rowmenuid + $top.sha1 conf -state readonly + grid $top.id $top.sha1 -sticky w + ${NS}::entry $top.head -width 60 + $top.head insert 0 [lindex $commitinfo($rowmenuid) 0] + $top.head conf -state readonly + grid x $top.head -sticky w + ${NS}::label $top.tlab -text [mc "Tag name:"] + ${NS}::entry $top.tag -width 60 + grid $top.tlab $top.tag -sticky w + ${NS}::label $top.op -text [mc "Tag message is optional"] + grid $top.op -columnspan 2 -sticky we + ${NS}::label $top.mlab -text [mc "Tag message:"] + ${NS}::entry $top.msg -width 60 + grid $top.mlab $top.msg -sticky w + ${NS}::frame $top.buts + ${NS}::button $top.buts.gen -text [mc "Create"] -command mktaggo + ${NS}::button $top.buts.can -text [mc "Cancel"] -command mktagcan + bind $top mktaggo + bind $top mktagcan + grid $top.buts.gen $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + grid $top.buts - -pady 10 -sticky ew + focus $top.tag +} + +proc domktag {} { + global mktagtop env tagids idtags + + set id [$mktagtop.sha1 get] + set tag [$mktagtop.tag get] + set msg [$mktagtop.msg get] + if {$tag == {}} { + error_popup [mc "No tag name specified"] $mktagtop + return 0 + } + if {[info exists tagids($tag)]} { + error_popup [mc "Tag \"%s\" already exists" $tag] $mktagtop + return 0 + } + if {[catch { + if {$msg != {}} { + exec git tag -a -m $msg $tag $id + } else { + exec git tag $tag $id + } + } err]} { + error_popup "[mc "Error creating tag:"] $err" $mktagtop + return 0 + } + + set tagids($tag) $id + lappend idtags($id) $tag + redrawtags $id + addedtag $id + dispneartags 0 + run refill_reflist + return 1 +} + +proc redrawtags {id} { + global canv linehtag idpos currentid curview cmitlisted markedid + global canvxmax iddrawn circleitem mainheadid circlecolors + + if {![commitinview $id $curview]} return + if {![info exists iddrawn($id)]} return + set row [rowofcommit $id] + if {$id eq $mainheadid} { + set ofill yellow + } else { + set ofill [lindex $circlecolors $cmitlisted($curview,$id)] + } + $canv itemconf $circleitem($row) -fill $ofill + $canv delete tag.$id + set xt [eval drawtags $id $idpos($id)] + $canv coords $linehtag($id) $xt [lindex $idpos($id) 2] + set text [$canv itemcget $linehtag($id) -text] + set font [$canv itemcget $linehtag($id) -font] + set xr [expr {$xt + [font measure $font $text]}] + if {$xr > $canvxmax} { + set canvxmax $xr + setcanvscroll + } + if {[info exists currentid] && $currentid == $id} { + make_secsel $id + } + if {[info exists markedid] && $markedid eq $id} { + make_idmark $id + } +} + +proc mktagcan {} { + global mktagtop + + catch {destroy $mktagtop} + unset mktagtop +} + +proc mktaggo {} { + if {![domktag]} return + mktagcan +} + +proc writecommit {} { + global rowmenuid wrcomtop commitinfo wrcomcmd NS + + set top .writecommit + set wrcomtop $top + catch {destroy $top} + ttk_toplevel $top + make_transient $top . 
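+    # dialog layout: read-only commit id and headline, the command used to
+    # produce the output (wrcomcmd), and an output file name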
+ ${NS}::label $top.title -text [mc "Write commit to file"] + grid $top.title - -pady 10 + ${NS}::label $top.id -text [mc "ID:"] + ${NS}::entry $top.sha1 -width 40 + $top.sha1 insert 0 $rowmenuid + $top.sha1 conf -state readonly + grid $top.id $top.sha1 -sticky w + ${NS}::entry $top.head -width 60 + $top.head insert 0 [lindex $commitinfo($rowmenuid) 0] + $top.head conf -state readonly + grid x $top.head -sticky w + ${NS}::label $top.clab -text [mc "Command:"] + ${NS}::entry $top.cmd -width 60 -textvariable wrcomcmd + grid $top.clab $top.cmd -sticky w -pady 10 + ${NS}::label $top.flab -text [mc "Output file:"] + ${NS}::entry $top.fname -width 60 + $top.fname insert 0 [file normalize "commit-[string range $rowmenuid 0 6]"] + grid $top.flab $top.fname -sticky w + ${NS}::frame $top.buts + ${NS}::button $top.buts.gen -text [mc "Write"] -command wrcomgo + ${NS}::button $top.buts.can -text [mc "Cancel"] -command wrcomcan + bind $top wrcomgo + bind $top wrcomcan + grid $top.buts.gen $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + grid $top.buts - -pady 10 -sticky ew + focus $top.fname +} + +proc wrcomgo {} { + global wrcomtop + + set id [$wrcomtop.sha1 get] + set cmd "echo $id | [$wrcomtop.cmd get]" + set fname [$wrcomtop.fname get] + if {[catch {exec sh -c $cmd >$fname &} err]} { + error_popup "[mc "Error writing commit:"] $err" $wrcomtop + } + catch {destroy $wrcomtop} + unset wrcomtop +} + +proc wrcomcan {} { + global wrcomtop + + catch {destroy $wrcomtop} + unset wrcomtop +} + +proc mkbranch {} { + global rowmenuid mkbrtop NS + + set top .makebranch + catch {destroy $top} + ttk_toplevel $top + make_transient $top . + ${NS}::label $top.title -text [mc "Create new branch"] + grid $top.title - -pady 10 + ${NS}::label $top.id -text [mc "ID:"] + ${NS}::entry $top.sha1 -width 40 + $top.sha1 insert 0 $rowmenuid + $top.sha1 conf -state readonly + grid $top.id $top.sha1 -sticky w + ${NS}::label $top.nlab -text [mc "Name:"] + ${NS}::entry $top.name -width 40 + grid $top.nlab $top.name -sticky w + ${NS}::frame $top.buts + ${NS}::button $top.buts.go -text [mc "Create"] -command [list mkbrgo $top] + ${NS}::button $top.buts.can -text [mc "Cancel"] -command "catch {destroy $top}" + bind $top [list mkbrgo $top] + bind $top "catch {destroy $top}" + grid $top.buts.go $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + grid $top.buts - -pady 10 -sticky ew + focus $top.name +} + +proc mkbrgo {top} { + global headids idheads + + set name [$top.name get] + set id [$top.sha1 get] + set cmdargs {} + set old_id {} + if {$name eq {}} { + error_popup [mc "Please specify a name for the new branch"] $top + return + } + if {[info exists headids($name)]} { + if {![confirm_popup [mc \ + "Branch '%s' already exists. Overwrite?" 
$name] $top]} { + return + } + set old_id $headids($name) + lappend cmdargs -f + } + catch {destroy $top} + lappend cmdargs $name $id + nowbusy newbranch + update + if {[catch { + eval exec git branch $cmdargs + } err]} { + notbusy newbranch + error_popup $err + } else { + notbusy newbranch + if {$old_id ne {}} { + movehead $id $name + movedhead $id $name + redrawtags $old_id + redrawtags $id + } else { + set headids($name) $id + lappend idheads($id) $name + addedhead $id $name + redrawtags $id + } + dispneartags 0 + run refill_reflist + } +} + +proc exec_citool {tool_args {baseid {}}} { + global commitinfo env + + set save_env [array get env GIT_AUTHOR_*] + + if {$baseid ne {}} { + if {![info exists commitinfo($baseid)]} { + getcommit $baseid + } + set author [lindex $commitinfo($baseid) 1] + set date [lindex $commitinfo($baseid) 2] + if {[regexp {^\s*(\S.*\S|\S)\s*<(.*)>\s*$} \ + $author author name email] + && $date ne {}} { + set env(GIT_AUTHOR_NAME) $name + set env(GIT_AUTHOR_EMAIL) $email + set env(GIT_AUTHOR_DATE) $date + } + } + + eval exec git citool $tool_args & + + array unset env GIT_AUTHOR_* + array set env $save_env +} + +proc cherrypick {} { + global rowmenuid curview + global mainhead mainheadid + + set oldhead [exec git rev-parse HEAD] + set dheads [descheads $rowmenuid] + if {$dheads ne {} && [lsearch -exact $dheads $oldhead] >= 0} { + set ok [confirm_popup [mc "Commit %s is already\ + included in branch %s -- really re-apply it?" \ + [string range $rowmenuid 0 7] $mainhead]] + if {!$ok} return + } + nowbusy cherrypick [mc "Cherry-picking"] + update + # Unfortunately git-cherry-pick writes stuff to stderr even when + # no error occurs, and exec takes that as an indication of error... + if {[catch {exec sh -c "git cherry-pick -r $rowmenuid 2>&1"} err]} { + notbusy cherrypick + if {[regexp -line \ + {Entry '(.*)' (would be overwritten by merge|not uptodate)} \ + $err msg fname]} { + error_popup [mc "Cherry-pick failed because of local changes\ + to file '%s'.\nPlease commit, reset or stash\ + your changes and try again." $fname] + } elseif {[regexp -line \ + {^(CONFLICT \(.*\):|Automatic cherry-pick failed|error: could not apply)} \ + $err]} { + if {[confirm_popup [mc "Cherry-pick failed because of merge\ + conflict.\nDo you wish to run git citool to\ + resolve it?"]]} { + # Force citool to read MERGE_MSG + file delete [file join [gitdir] "GITGUI_MSG"] + exec_citool {} $rowmenuid + } + } else { + error_popup $err + } + run updatecommits + return + } + set newhead [exec git rev-parse HEAD] + if {$newhead eq $oldhead} { + notbusy cherrypick + error_popup [mc "No changes committed"] + return + } + addnewchild $newhead $oldhead + if {[commitinview $oldhead $curview]} { + # XXX this isn't right if we have a path limit... + insertrow $newhead $oldhead $curview + if {$mainhead ne {}} { + movehead $newhead $mainhead + movedhead $newhead $mainhead + } + set mainheadid $newhead + redrawtags $oldhead + redrawtags $newhead + selbyid $newhead + } + notbusy cherrypick +} + +proc resethead {} { + global mainhead rowmenuid confirm_ok resettype NS + + set confirm_ok 0 + set w ".confirmreset" + ttk_toplevel $w + make_transient $w . + wm title $w [mc "Confirm reset"] + ${NS}::label $w.m -text \ + [mc "Reset branch %s to %s?" 
$mainhead [string range $rowmenuid 0 7]] + pack $w.m -side top -fill x -padx 20 -pady 20 + ${NS}::labelframe $w.f -text [mc "Reset type:"] + set resettype mixed + ${NS}::radiobutton $w.f.soft -value soft -variable resettype \ + -text [mc "Soft: Leave working tree and index untouched"] + grid $w.f.soft -sticky w + ${NS}::radiobutton $w.f.mixed -value mixed -variable resettype \ + -text [mc "Mixed: Leave working tree untouched, reset index"] + grid $w.f.mixed -sticky w + ${NS}::radiobutton $w.f.hard -value hard -variable resettype \ + -text [mc "Hard: Reset working tree and index\n(discard ALL local changes)"] + grid $w.f.hard -sticky w + pack $w.f -side top -fill x -padx 4 + ${NS}::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w" + pack $w.ok -side left -fill x -padx 20 -pady 20 + ${NS}::button $w.cancel -text [mc Cancel] -command "destroy $w" + bind $w [list destroy $w] + pack $w.cancel -side right -fill x -padx 20 -pady 20 + bind $w "grab $w; focus $w" + tkwait window $w + if {!$confirm_ok} return + if {[catch {set fd [open \ + [list | git reset --$resettype $rowmenuid 2>@1] r]} err]} { + error_popup $err + } else { + dohidelocalchanges + filerun $fd [list readresetstat $fd] + nowbusy reset [mc "Resetting"] + selbyid $rowmenuid + } +} + +proc readresetstat {fd} { + global mainhead mainheadid showlocalchanges rprogcoord + + if {[gets $fd line] >= 0} { + if {[regexp {([0-9]+)% \(([0-9]+)/([0-9]+)\)} $line match p m n]} { + set rprogcoord [expr {1.0 * $m / $n}] + adjustprogress + } + return 1 + } + set rprogcoord 0 + adjustprogress + notbusy reset + if {[catch {close $fd} err]} { + error_popup $err + } + set oldhead $mainheadid + set newhead [exec git rev-parse HEAD] + if {$newhead ne $oldhead} { + movehead $newhead $mainhead + movedhead $newhead $mainhead + set mainheadid $newhead + redrawtags $oldhead + redrawtags $newhead + } + if {$showlocalchanges} { + doshowlocalchanges + } + return 0 +} + +# context menu for a head +proc headmenu {x y id head} { + global headmenuid headmenuhead headctxmenu mainhead + + stopfinding + set headmenuid $id + set headmenuhead $head + set state normal + if {[string match "remotes/*" $head]} { + set state disabled + } + if {$head eq $mainhead} { + set state disabled + } + $headctxmenu entryconfigure 0 -state $state + $headctxmenu entryconfigure 1 -state $state + tk_popup $headctxmenu $x $y +} + +proc cobranch {} { + global headmenuid headmenuhead headids + global showlocalchanges + + # check the tree is clean first?? 
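+    # run git checkout on the selected head; readcheckoutstat reports
+    # progress and moves the main head markers when it finishes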
+ nowbusy checkout [mc "Checking out"] + update + dohidelocalchanges + if {[catch { + set fd [open [list | git checkout $headmenuhead 2>@1] r] + } err]} { + notbusy checkout + error_popup $err + if {$showlocalchanges} { + dodiffindex + } + } else { + filerun $fd [list readcheckoutstat $fd $headmenuhead $headmenuid] + } +} + +proc readcheckoutstat {fd newhead newheadid} { + global mainhead mainheadid headids showlocalchanges progresscoords + global viewmainheadid curview + + if {[gets $fd line] >= 0} { + if {[regexp {([0-9]+)% \(([0-9]+)/([0-9]+)\)} $line match p m n]} { + set progresscoords [list 0 [expr {1.0 * $m / $n}]] + adjustprogress + } + return 1 + } + set progresscoords {0 0} + adjustprogress + notbusy checkout + if {[catch {close $fd} err]} { + error_popup $err + } + set oldmainid $mainheadid + set mainhead $newhead + set mainheadid $newheadid + set viewmainheadid($curview) $newheadid + redrawtags $oldmainid + redrawtags $newheadid + selbyid $newheadid + if {$showlocalchanges} { + dodiffindex + } +} + +proc rmbranch {} { + global headmenuid headmenuhead mainhead + global idheads + + set head $headmenuhead + set id $headmenuid + # this check shouldn't be needed any more... + if {$head eq $mainhead} { + error_popup [mc "Cannot delete the currently checked-out branch"] + return + } + set dheads [descheads $id] + if {[llength $dheads] == 1 && $idheads($dheads) eq $head} { + # the stuff on this branch isn't on any other branch + if {![confirm_popup [mc "The commits on branch %s aren't on any other\ + branch.\nReally delete branch %s?" $head $head]]} return + } + nowbusy rmbranch + update + if {[catch {exec git branch -D $head} err]} { + notbusy rmbranch + error_popup $err + return + } + removehead $id $head + removedhead $id $head + redrawtags $id + notbusy rmbranch + dispneartags 0 + run refill_reflist +} + +# Display a list of tags and heads +proc showrefs {} { + global showrefstop bgcolor fgcolor selectbgcolor NS + global bglist fglist reflistfilter reflist maincursor + + set top .showrefs + set showrefstop $top + if {[winfo exists $top]} { + raise $top + refill_reflist + return + } + ttk_toplevel $top + wm title $top [mc "Tags and heads: %s" [file tail [pwd]]] + make_transient $top . 
+ text $top.list -background $bgcolor -foreground $fgcolor \ + -selectbackground $selectbgcolor -font mainfont \ + -xscrollcommand "$top.xsb set" -yscrollcommand "$top.ysb set" \ + -width 30 -height 20 -cursor $maincursor \ + -spacing1 1 -spacing3 1 -state disabled + $top.list tag configure highlight -background $selectbgcolor + lappend bglist $top.list + lappend fglist $top.list + ${NS}::scrollbar $top.ysb -command "$top.list yview" -orient vertical + ${NS}::scrollbar $top.xsb -command "$top.list xview" -orient horizontal + grid $top.list $top.ysb -sticky nsew + grid $top.xsb x -sticky ew + ${NS}::frame $top.f + ${NS}::label $top.f.l -text "[mc "Filter"]: " + ${NS}::entry $top.f.e -width 20 -textvariable reflistfilter + set reflistfilter "*" + trace add variable reflistfilter write reflistfilter_change + pack $top.f.e -side right -fill x -expand 1 + pack $top.f.l -side left + grid $top.f - -sticky ew -pady 2 + ${NS}::button $top.close -command [list destroy $top] -text [mc "Close"] + bind $top [list destroy $top] + grid $top.close - + grid columnconfigure $top 0 -weight 1 + grid rowconfigure $top 0 -weight 1 + bind $top.list <1> {break} + bind $top.list {break} + bind $top.list {sel_reflist %W %x %y; break} + set reflist {} + refill_reflist +} + +proc sel_reflist {w x y} { + global showrefstop reflist headids tagids otherrefids + + if {![winfo exists $showrefstop]} return + set l [lindex [split [$w index "@$x,$y"] "."] 0] + set ref [lindex $reflist [expr {$l-1}]] + set n [lindex $ref 0] + switch -- [lindex $ref 1] { + "H" {selbyid $headids($n)} + "T" {selbyid $tagids($n)} + "o" {selbyid $otherrefids($n)} + } + $showrefstop.list tag add highlight $l.0 "$l.0 lineend" +} + +proc unsel_reflist {} { + global showrefstop + + if {![info exists showrefstop] || ![winfo exists $showrefstop]} return + $showrefstop.list tag remove highlight 0.0 end +} + +proc reflistfilter_change {n1 n2 op} { + global reflistfilter + + after cancel refill_reflist + after 200 refill_reflist +} + +proc refill_reflist {} { + global reflist reflistfilter showrefstop headids tagids otherrefids + global curview + + if {![info exists showrefstop] || ![winfo exists $showrefstop]} return + set refs {} + foreach n [array names headids] { + if {[string match $reflistfilter $n]} { + if {[commitinview $headids($n) $curview]} { + lappend refs [list $n H] + } else { + interestedin $headids($n) {run refill_reflist} + } + } + } + foreach n [array names tagids] { + if {[string match $reflistfilter $n]} { + if {[commitinview $tagids($n) $curview]} { + lappend refs [list $n T] + } else { + interestedin $tagids($n) {run refill_reflist} + } + } + } + foreach n [array names otherrefids] { + if {[string match $reflistfilter $n]} { + if {[commitinview $otherrefids($n) $curview]} { + lappend refs [list $n o] + } else { + interestedin $otherrefids($n) {run refill_reflist} + } + } + } + set refs [lsort -index 0 $refs] + if {$refs eq $reflist} return + + # Update the contents of $showrefstop.list according to the + # differences between $reflist (old) and $refs (new) + $showrefstop.list conf -state normal + $showrefstop.list insert end "\n" + set i 0 + set j 0 + while {$i < [llength $reflist] || $j < [llength $refs]} { + if {$i < [llength $reflist]} { + if {$j < [llength $refs]} { + set cmp [string compare [lindex $reflist $i 0] \ + [lindex $refs $j 0]] + if {$cmp == 0} { + set cmp [string compare [lindex $reflist $i 1] \ + [lindex $refs $j 1]] + } + } else { + set cmp -1 + } + } else { + set cmp 1 + } + switch -- $cmp { + -1 { + 
$showrefstop.list delete "[expr {$j+1}].0" "[expr {$j+2}].0" + incr i + } + 0 { + incr i + incr j + } + 1 { + set l [expr {$j + 1}] + $showrefstop.list image create $l.0 -align baseline \ + -image reficon-[lindex $refs $j 1] -padx 2 + $showrefstop.list insert $l.1 "[lindex $refs $j 0]\n" + incr j + } + } + } + set reflist $refs + # delete last newline + $showrefstop.list delete end-2c end-1c + $showrefstop.list conf -state disabled +} + +# Stuff for finding nearby tags +proc getallcommits {} { + global allcommits nextarc seeds allccache allcwait cachedarcs allcupdate + global idheads idtags idotherrefs allparents tagobjid + + if {![info exists allcommits]} { + set nextarc 0 + set allcommits 0 + set seeds {} + set allcwait 0 + set cachedarcs 0 + set allccache [file join [gitdir] "gitk.cache"] + if {![catch { + set f [open $allccache r] + set allcwait 1 + getcache $f + }]} return + } + + if {$allcwait} { + return + } + set cmd [list | git rev-list --parents] + set allcupdate [expr {$seeds ne {}}] + if {!$allcupdate} { + set ids "--all" + } else { + set refs [concat [array names idheads] [array names idtags] \ + [array names idotherrefs]] + set ids {} + set tagobjs {} + foreach name [array names tagobjid] { + lappend tagobjs $tagobjid($name) + } + foreach id [lsort -unique $refs] { + if {![info exists allparents($id)] && + [lsearch -exact $tagobjs $id] < 0} { + lappend ids $id + } + } + if {$ids ne {}} { + foreach id $seeds { + lappend ids "^$id" + } + } + } + if {$ids ne {}} { + set fd [open [concat $cmd $ids] r] + fconfigure $fd -blocking 0 + incr allcommits + nowbusy allcommits + filerun $fd [list getallclines $fd] + } else { + dispneartags 0 + } +} + +# Since most commits have 1 parent and 1 child, we group strings of +# such commits into "arcs" joining branch/merge points (BMPs), which +# are commits that either don't have 1 parent or don't have 1 child. +# +# arcnos(id) - incoming arcs for BMP, arc we're on for other nodes +# arcout(id) - outgoing arcs for BMP +# arcids(a) - list of IDs on arc including end but not start +# arcstart(a) - BMP ID at start of arc +# arcend(a) - BMP ID at end of arc +# growing(a) - arc a is still growing +# arctags(a) - IDs out of arcids (excluding end) that have tags +# archeads(a) - IDs out of arcids (excluding end) that have heads +# The start of an arc is at the descendent end, so "incoming" means +# coming from descendents, and "outgoing" means going towards ancestors. 
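+#
+# As an illustration only (these commit names are made up, not taken from
+# any real repository): take a tip commit T whose ancestry runs
+# T -> C1 -> C2 -> M, where T has no children and M is a merge with two
+# parents.  T and M are BMPs, C1 and C2 are not, so the whole run becomes
+# a single arc a with roughly:
+#   arcstart(a) = T            arcend(a) = M
+#   arcids(a)   = C1 C2 M      arcnos(C1) = arcnos(C2) = a
+#   arcout(T) includes a       arcnos(M) includes a
+# arctags(a)/archeads(a) would then hold whichever of C1 and C2 carry tags
+# or heads.  The actual arc numbering depends on the order rev-list emits
+# the commits; this sketch only shows the shape of the data.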
+ +proc getallclines {fd} { + global allparents allchildren idtags idheads nextarc + global arcnos arcids arctags arcout arcend arcstart archeads growing + global seeds allcommits cachedarcs allcupdate + + set nid 0 + while {[incr nid] <= 1000 && [gets $fd line] >= 0} { + set id [lindex $line 0] + if {[info exists allparents($id)]} { + # seen it already + continue + } + set cachedarcs 0 + set olds [lrange $line 1 end] + set allparents($id) $olds + if {![info exists allchildren($id)]} { + set allchildren($id) {} + set arcnos($id) {} + lappend seeds $id + } else { + set a $arcnos($id) + if {[llength $olds] == 1 && [llength $a] == 1} { + lappend arcids($a) $id + if {[info exists idtags($id)]} { + lappend arctags($a) $id + } + if {[info exists idheads($id)]} { + lappend archeads($a) $id + } + if {[info exists allparents($olds)]} { + # seen parent already + if {![info exists arcout($olds)]} { + splitarc $olds + } + lappend arcids($a) $olds + set arcend($a) $olds + unset growing($a) + } + lappend allchildren($olds) $id + lappend arcnos($olds) $a + continue + } + } + foreach a $arcnos($id) { + lappend arcids($a) $id + set arcend($a) $id + unset growing($a) + } + + set ao {} + foreach p $olds { + lappend allchildren($p) $id + set a [incr nextarc] + set arcstart($a) $id + set archeads($a) {} + set arctags($a) {} + set archeads($a) {} + set arcids($a) {} + lappend ao $a + set growing($a) 1 + if {[info exists allparents($p)]} { + # seen it already, may need to make a new branch + if {![info exists arcout($p)]} { + splitarc $p + } + lappend arcids($a) $p + set arcend($a) $p + unset growing($a) + } + lappend arcnos($p) $a + } + set arcout($id) $ao + } + if {$nid > 0} { + global cached_dheads cached_dtags cached_atags + catch {unset cached_dheads} + catch {unset cached_dtags} + catch {unset cached_atags} + } + if {![eof $fd]} { + return [expr {$nid >= 1000? 
2: 1}] + } + set cacheok 1 + if {[catch { + fconfigure $fd -blocking 1 + close $fd + } err]} { + # got an error reading the list of commits + # if we were updating, try rereading the whole thing again + if {$allcupdate} { + incr allcommits -1 + dropcache $err + return + } + error_popup "[mc "Error reading commit topology information;\ + branch and preceding/following tag information\ + will be incomplete."]\n($err)" + set cacheok 0 + } + if {[incr allcommits -1] == 0} { + notbusy allcommits + if {$cacheok} { + run savecache + } + } + dispneartags 0 + return 0 +} + +proc recalcarc {a} { + global arctags archeads arcids idtags idheads + + set at {} + set ah {} + foreach id [lrange $arcids($a) 0 end-1] { + if {[info exists idtags($id)]} { + lappend at $id + } + if {[info exists idheads($id)]} { + lappend ah $id + } + } + set arctags($a) $at + set archeads($a) $ah +} + +proc splitarc {p} { + global arcnos arcids nextarc arctags archeads idtags idheads + global arcstart arcend arcout allparents growing + + set a $arcnos($p) + if {[llength $a] != 1} { + puts "oops splitarc called but [llength $a] arcs already" + return + } + set a [lindex $a 0] + set i [lsearch -exact $arcids($a) $p] + if {$i < 0} { + puts "oops splitarc $p not in arc $a" + return + } + set na [incr nextarc] + if {[info exists arcend($a)]} { + set arcend($na) $arcend($a) + } else { + set l [lindex $allparents([lindex $arcids($a) end]) 0] + set j [lsearch -exact $arcnos($l) $a] + set arcnos($l) [lreplace $arcnos($l) $j $j $na] + } + set tail [lrange $arcids($a) [expr {$i+1}] end] + set arcids($a) [lrange $arcids($a) 0 $i] + set arcend($a) $p + set arcstart($na) $p + set arcout($p) $na + set arcids($na) $tail + if {[info exists growing($a)]} { + set growing($na) 1 + unset growing($a) + } + + foreach id $tail { + if {[llength $arcnos($id)] == 1} { + set arcnos($id) $na + } else { + set j [lsearch -exact $arcnos($id) $a] + set arcnos($id) [lreplace $arcnos($id) $j $j $na] + } + } + + # reconstruct tags and heads lists + if {$arctags($a) ne {} || $archeads($a) ne {}} { + recalcarc $a + recalcarc $na + } else { + set arctags($na) {} + set archeads($na) {} + } +} + +# Update things for a new commit added that is a child of one +# existing commit. Used when cherry-picking. +proc addnewchild {id p} { + global allparents allchildren idtags nextarc + global arcnos arcids arctags arcout arcend arcstart archeads growing + global seeds allcommits + + if {![info exists allcommits] || ![info exists arcnos($p)]} return + set allparents($id) [list $p] + set allchildren($id) {} + set arcnos($id) {} + lappend seeds $id + lappend allchildren($p) $id + set a [incr nextarc] + set arcstart($a) $id + set archeads($a) {} + set arctags($a) {} + set arcids($a) [list $p] + set arcend($a) $p + if {![info exists arcout($p)]} { + splitarc $p + } + lappend arcnos($p) $a + set arcout($id) [list $a] +} + +# This implements a cache for the topology information. +# The cache saves, for each arc, the start and end of the arc, +# the ids on the arc, and the outgoing arcs from the end. +proc readcache {f} { + global arcnos arcids arcout arcstart arcend arctags archeads nextarc + global idtags idheads allparents cachedarcs possible_seeds seeds growing + global allcwait + + set a $nextarc + set lim $cachedarcs + if {$lim - $a > 500} { + set lim [expr {$a + 500}] + } + if {[catch { + if {$a == $lim} { + # finish reading the cache and setting up arctags, etc. 
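+	    # A sketch of what gitk.cache is expected to contain, based on
+	    # getcache, writecache and savecache below (values invented):
+	    #   line 1:        1 followed by the number of arcs, from savecache
+	    #   one line/arc:  a three-element Tcl list holding the arc start,
+	    #                  the arc end (empty if the arc is still growing)
+	    #                  and the list of ids on the arc, from writecache
+	    #   last line:     a lone 1, written once all arcs are dumped
+	    # So at this point a final line that is not exactly "1" means the
+	    # cache was truncated, and the error makes dropcache discard it.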
+ set line [gets $f] + if {$line ne "1"} {error "bad final version"} + close $f + foreach id [array names idtags] { + if {[info exists arcnos($id)] && [llength $arcnos($id)] == 1 && + [llength $allparents($id)] == 1} { + set a [lindex $arcnos($id) 0] + if {$arctags($a) eq {}} { + recalcarc $a + } + } + } + foreach id [array names idheads] { + if {[info exists arcnos($id)] && [llength $arcnos($id)] == 1 && + [llength $allparents($id)] == 1} { + set a [lindex $arcnos($id) 0] + if {$archeads($a) eq {}} { + recalcarc $a + } + } + } + foreach id [lsort -unique $possible_seeds] { + if {$arcnos($id) eq {}} { + lappend seeds $id + } + } + set allcwait 0 + } else { + while {[incr a] <= $lim} { + set line [gets $f] + if {[llength $line] != 3} {error "bad line"} + set s [lindex $line 0] + set arcstart($a) $s + lappend arcout($s) $a + if {![info exists arcnos($s)]} { + lappend possible_seeds $s + set arcnos($s) {} + } + set e [lindex $line 1] + if {$e eq {}} { + set growing($a) 1 + } else { + set arcend($a) $e + if {![info exists arcout($e)]} { + set arcout($e) {} + } + } + set arcids($a) [lindex $line 2] + foreach id $arcids($a) { + lappend allparents($s) $id + set s $id + lappend arcnos($id) $a + } + if {![info exists allparents($s)]} { + set allparents($s) {} + } + set arctags($a) {} + set archeads($a) {} + } + set nextarc [expr {$a - 1}] + } + } err]} { + dropcache $err + return 0 + } + if {!$allcwait} { + getallcommits + } + return $allcwait +} + +proc getcache {f} { + global nextarc cachedarcs possible_seeds + + if {[catch { + set line [gets $f] + if {[llength $line] != 2 || [lindex $line 0] ne "1"} {error "bad version"} + # make sure it's an integer + set cachedarcs [expr {int([lindex $line 1])}] + if {$cachedarcs < 0} {error "bad number of arcs"} + set nextarc 0 + set possible_seeds {} + run readcache $f + } err]} { + dropcache $err + } + return 0 +} + +proc dropcache {err} { + global allcwait nextarc cachedarcs seeds + + #puts "dropping cache ($err)" + foreach v {arcnos arcout arcids arcstart arcend growing \ + arctags archeads allparents allchildren} { + global $v + catch {unset $v} + } + set allcwait 0 + set nextarc 0 + set cachedarcs 0 + set seeds {} + getallcommits +} + +proc writecache {f} { + global cachearc cachedarcs allccache + global arcstart arcend arcnos arcids arcout + + set a $cachearc + set lim $cachedarcs + if {$lim - $a > 1000} { + set lim [expr {$a + 1000}] + } + if {[catch { + while {[incr a] <= $lim} { + if {[info exists arcend($a)]} { + puts $f [list $arcstart($a) $arcend($a) $arcids($a)] + } else { + puts $f [list $arcstart($a) {} $arcids($a)] + } + } + } err]} { + catch {close $f} + catch {file delete $allccache} + #puts "writing cache failed ($err)" + return 0 + } + set cachearc [expr {$a - 1}] + if {$a > $cachedarcs} { + puts $f "1" + close $f + return 0 + } + return 1 +} + +proc savecache {} { + global nextarc cachedarcs cachearc allccache + + if {$nextarc == $cachedarcs} return + set cachearc 0 + set cachedarcs $nextarc + catch { + set f [open $allccache w] + puts $f [list 1 $cachedarcs] + run writecache $f + } +} + +# Returns 1 if a is an ancestor of b, -1 if b is an ancestor of a, +# or 0 if neither is true. +proc anc_or_desc {a b} { + global arcout arcstart arcend arcnos cached_isanc + + if {$arcnos($a) eq $arcnos($b)} { + # Both are on the same arc(s); either both are the same BMP, + # or if one is not a BMP, the other is also not a BMP or is + # the BMP at end of the arc (and it only has 1 incoming arc). + # Or both can be BMPs with no incoming arcs. 
+ if {$a eq $b || $arcnos($a) eq {}} { + return 0 + } + # assert {[llength $arcnos($a)] == 1} + set arc [lindex $arcnos($a) 0] + set i [lsearch -exact $arcids($arc) $a] + set j [lsearch -exact $arcids($arc) $b] + if {$i < 0 || $i > $j} { + return 1 + } else { + return -1 + } + } + + if {![info exists arcout($a)]} { + set arc [lindex $arcnos($a) 0] + if {[info exists arcend($arc)]} { + set aend $arcend($arc) + } else { + set aend {} + } + set a $arcstart($arc) + } else { + set aend $a + } + if {![info exists arcout($b)]} { + set arc [lindex $arcnos($b) 0] + if {[info exists arcend($arc)]} { + set bend $arcend($arc) + } else { + set bend {} + } + set b $arcstart($arc) + } else { + set bend $b + } + if {$a eq $bend} { + return 1 + } + if {$b eq $aend} { + return -1 + } + if {[info exists cached_isanc($a,$bend)]} { + if {$cached_isanc($a,$bend)} { + return 1 + } + } + if {[info exists cached_isanc($b,$aend)]} { + if {$cached_isanc($b,$aend)} { + return -1 + } + if {[info exists cached_isanc($a,$bend)]} { + return 0 + } + } + + set todo [list $a $b] + set anc($a) a + set anc($b) b + for {set i 0} {$i < [llength $todo]} {incr i} { + set x [lindex $todo $i] + if {$anc($x) eq {}} { + continue + } + foreach arc $arcnos($x) { + set xd $arcstart($arc) + if {$xd eq $bend} { + set cached_isanc($a,$bend) 1 + set cached_isanc($b,$aend) 0 + return 1 + } elseif {$xd eq $aend} { + set cached_isanc($b,$aend) 1 + set cached_isanc($a,$bend) 0 + return -1 + } + if {![info exists anc($xd)]} { + set anc($xd) $anc($x) + lappend todo $xd + } elseif {$anc($xd) ne $anc($x)} { + set anc($xd) {} + } + } + } + set cached_isanc($a,$bend) 0 + set cached_isanc($b,$aend) 0 + return 0 +} + +# This identifies whether $desc has an ancestor that is +# a growing tip of the graph and which is not an ancestor of $anc +# and returns 0 if so and 1 if not. +# If we subsequently discover a tag on such a growing tip, and that +# turns out to be a descendent of $anc (which it could, since we +# don't necessarily see children before parents), then $desc +# isn't a good choice to display as a descendent tag of +# $anc (since it is the descendent of another tag which is +# a descendent of $anc). Similarly, $anc isn't a good choice to +# display as a ancestor tag of $desc. 
+# +proc is_certain {desc anc} { + global arcnos arcout arcstart arcend growing problems + + set certain {} + if {[llength $arcnos($anc)] == 1} { + # tags on the same arc are certain + if {$arcnos($desc) eq $arcnos($anc)} { + return 1 + } + if {![info exists arcout($anc)]} { + # if $anc is partway along an arc, use the start of the arc instead + set a [lindex $arcnos($anc) 0] + set anc $arcstart($a) + } + } + if {[llength $arcnos($desc)] > 1 || [info exists arcout($desc)]} { + set x $desc + } else { + set a [lindex $arcnos($desc) 0] + set x $arcend($a) + } + if {$x == $anc} { + return 1 + } + set anclist [list $x] + set dl($x) 1 + set nnh 1 + set ngrowanc 0 + for {set i 0} {$i < [llength $anclist] && ($nnh > 0 || $ngrowanc > 0)} {incr i} { + set x [lindex $anclist $i] + if {$dl($x)} { + incr nnh -1 + } + set done($x) 1 + foreach a $arcout($x) { + if {[info exists growing($a)]} { + if {![info exists growanc($x)] && $dl($x)} { + set growanc($x) 1 + incr ngrowanc + } + } else { + set y $arcend($a) + if {[info exists dl($y)]} { + if {$dl($y)} { + if {!$dl($x)} { + set dl($y) 0 + if {![info exists done($y)]} { + incr nnh -1 + } + if {[info exists growanc($x)]} { + incr ngrowanc -1 + } + set xl [list $y] + for {set k 0} {$k < [llength $xl]} {incr k} { + set z [lindex $xl $k] + foreach c $arcout($z) { + if {[info exists arcend($c)]} { + set v $arcend($c) + if {[info exists dl($v)] && $dl($v)} { + set dl($v) 0 + if {![info exists done($v)]} { + incr nnh -1 + } + if {[info exists growanc($v)]} { + incr ngrowanc -1 + } + lappend xl $v + } + } + } + } + } + } + } elseif {$y eq $anc || !$dl($x)} { + set dl($y) 0 + lappend anclist $y + } else { + set dl($y) 1 + lappend anclist $y + incr nnh + } + } + } + } + foreach x [array names growanc] { + if {$dl($x)} { + return 0 + } + return 0 + } + return 1 +} + +proc validate_arctags {a} { + global arctags idtags + + set i -1 + set na $arctags($a) + foreach id $arctags($a) { + incr i + if {![info exists idtags($id)]} { + set na [lreplace $na $i $i] + incr i -1 + } + } + set arctags($a) $na +} + +proc validate_archeads {a} { + global archeads idheads + + set i -1 + set na $archeads($a) + foreach id $archeads($a) { + incr i + if {![info exists idheads($id)]} { + set na [lreplace $na $i $i] + incr i -1 + } + } + set archeads($a) $na +} + +# Return the list of IDs that have tags that are descendents of id, +# ignoring IDs that are descendents of IDs already reported. 
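+# For example (tag and commit names invented for illustration): if a commit
+# X has descendent tags v1.0 and v2.0, and the commit tagged v2.0 is itself
+# a descendent of the one tagged v1.0, only the v1.0 commit is reported for
+# X -- v2.0 is implied by it and would only add noise to the nearby-tags
+# display.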
+proc desctags {id} { + global arcnos arcstart arcids arctags idtags allparents + global growing cached_dtags + + if {![info exists allparents($id)]} { + return {} + } + set t1 [clock clicks -milliseconds] + set argid $id + if {[llength $arcnos($id)] == 1 && [llength $allparents($id)] == 1} { + # part-way along an arc; check that arc first + set a [lindex $arcnos($id) 0] + if {$arctags($a) ne {}} { + validate_arctags $a + set i [lsearch -exact $arcids($a) $id] + set tid {} + foreach t $arctags($a) { + set j [lsearch -exact $arcids($a) $t] + if {$j >= $i} break + set tid $t + } + if {$tid ne {}} { + return $tid + } + } + set id $arcstart($a) + if {[info exists idtags($id)]} { + return $id + } + } + if {[info exists cached_dtags($id)]} { + return $cached_dtags($id) + } + + set origid $id + set todo [list $id] + set queued($id) 1 + set nc 1 + for {set i 0} {$i < [llength $todo] && $nc > 0} {incr i} { + set id [lindex $todo $i] + set done($id) 1 + set ta [info exists hastaggedancestor($id)] + if {!$ta} { + incr nc -1 + } + # ignore tags on starting node + if {!$ta && $i > 0} { + if {[info exists idtags($id)]} { + set tagloc($id) $id + set ta 1 + } elseif {[info exists cached_dtags($id)]} { + set tagloc($id) $cached_dtags($id) + set ta 1 + } + } + foreach a $arcnos($id) { + set d $arcstart($a) + if {!$ta && $arctags($a) ne {}} { + validate_arctags $a + if {$arctags($a) ne {}} { + lappend tagloc($id) [lindex $arctags($a) end] + } + } + if {$ta || $arctags($a) ne {}} { + set tomark [list $d] + for {set j 0} {$j < [llength $tomark]} {incr j} { + set dd [lindex $tomark $j] + if {![info exists hastaggedancestor($dd)]} { + if {[info exists done($dd)]} { + foreach b $arcnos($dd) { + lappend tomark $arcstart($b) + } + if {[info exists tagloc($dd)]} { + unset tagloc($dd) + } + } elseif {[info exists queued($dd)]} { + incr nc -1 + } + set hastaggedancestor($dd) 1 + } + } + } + if {![info exists queued($d)]} { + lappend todo $d + set queued($d) 1 + if {![info exists hastaggedancestor($d)]} { + incr nc + } + } + } + } + set tags {} + foreach id [array names tagloc] { + if {![info exists hastaggedancestor($id)]} { + foreach t $tagloc($id) { + if {[lsearch -exact $tags $t] < 0} { + lappend tags $t + } + } + } + } + set t2 [clock clicks -milliseconds] + set loopix $i + + # remove tags that are descendents of other tags + for {set i 0} {$i < [llength $tags]} {incr i} { + set a [lindex $tags $i] + for {set j 0} {$j < $i} {incr j} { + set b [lindex $tags $j] + set r [anc_or_desc $a $b] + if {$r == 1} { + set tags [lreplace $tags $j $j] + incr j -1 + incr i -1 + } elseif {$r == -1} { + set tags [lreplace $tags $i $i] + incr i -1 + break + } + } + } + + if {[array names growing] ne {}} { + # graph isn't finished, need to check if any tag could get + # eclipsed by another tag coming later. Simply ignore any + # tags that could later get eclipsed. 
+ set ctags {} + foreach t $tags { + if {[is_certain $t $origid]} { + lappend ctags $t + } + } + if {$tags eq $ctags} { + set cached_dtags($origid) $tags + } else { + set tags $ctags + } + } else { + set cached_dtags($origid) $tags + } + set t3 [clock clicks -milliseconds] + if {0 && $t3 - $t1 >= 100} { + puts "iterating descendents ($loopix/[llength $todo] nodes) took\ + [expr {$t2-$t1}]+[expr {$t3-$t2}]ms, $nc candidates left" + } + return $tags +} + +proc anctags {id} { + global arcnos arcids arcout arcend arctags idtags allparents + global growing cached_atags + + if {![info exists allparents($id)]} { + return {} + } + set t1 [clock clicks -milliseconds] + set argid $id + if {[llength $arcnos($id)] == 1 && [llength $allparents($id)] == 1} { + # part-way along an arc; check that arc first + set a [lindex $arcnos($id) 0] + if {$arctags($a) ne {}} { + validate_arctags $a + set i [lsearch -exact $arcids($a) $id] + foreach t $arctags($a) { + set j [lsearch -exact $arcids($a) $t] + if {$j > $i} { + return $t + } + } + } + if {![info exists arcend($a)]} { + return {} + } + set id $arcend($a) + if {[info exists idtags($id)]} { + return $id + } + } + if {[info exists cached_atags($id)]} { + return $cached_atags($id) + } + + set origid $id + set todo [list $id] + set queued($id) 1 + set taglist {} + set nc 1 + for {set i 0} {$i < [llength $todo] && $nc > 0} {incr i} { + set id [lindex $todo $i] + set done($id) 1 + set td [info exists hastaggeddescendent($id)] + if {!$td} { + incr nc -1 + } + # ignore tags on starting node + if {!$td && $i > 0} { + if {[info exists idtags($id)]} { + set tagloc($id) $id + set td 1 + } elseif {[info exists cached_atags($id)]} { + set tagloc($id) $cached_atags($id) + set td 1 + } + } + foreach a $arcout($id) { + if {!$td && $arctags($a) ne {}} { + validate_arctags $a + if {$arctags($a) ne {}} { + lappend tagloc($id) [lindex $arctags($a) 0] + } + } + if {![info exists arcend($a)]} continue + set d $arcend($a) + if {$td || $arctags($a) ne {}} { + set tomark [list $d] + for {set j 0} {$j < [llength $tomark]} {incr j} { + set dd [lindex $tomark $j] + if {![info exists hastaggeddescendent($dd)]} { + if {[info exists done($dd)]} { + foreach b $arcout($dd) { + if {[info exists arcend($b)]} { + lappend tomark $arcend($b) + } + } + if {[info exists tagloc($dd)]} { + unset tagloc($dd) + } + } elseif {[info exists queued($dd)]} { + incr nc -1 + } + set hastaggeddescendent($dd) 1 + } + } + } + if {![info exists queued($d)]} { + lappend todo $d + set queued($d) 1 + if {![info exists hastaggeddescendent($d)]} { + incr nc + } + } + } + } + set t2 [clock clicks -milliseconds] + set loopix $i + set tags {} + foreach id [array names tagloc] { + if {![info exists hastaggeddescendent($id)]} { + foreach t $tagloc($id) { + if {[lsearch -exact $tags $t] < 0} { + lappend tags $t + } + } + } + } + + # remove tags that are ancestors of other tags + for {set i 0} {$i < [llength $tags]} {incr i} { + set a [lindex $tags $i] + for {set j 0} {$j < $i} {incr j} { + set b [lindex $tags $j] + set r [anc_or_desc $a $b] + if {$r == -1} { + set tags [lreplace $tags $j $j] + incr j -1 + incr i -1 + } elseif {$r == 1} { + set tags [lreplace $tags $i $i] + incr i -1 + break + } + } + } + + if {[array names growing] ne {}} { + # graph isn't finished, need to check if any tag could get + # eclipsed by another tag coming later. Simply ignore any + # tags that could later get eclipsed. 
+ set ctags {} + foreach t $tags { + if {[is_certain $origid $t]} { + lappend ctags $t + } + } + if {$tags eq $ctags} { + set cached_atags($origid) $tags + } else { + set tags $ctags + } + } else { + set cached_atags($origid) $tags + } + set t3 [clock clicks -milliseconds] + if {0 && $t3 - $t1 >= 100} { + puts "iterating ancestors ($loopix/[llength $todo] nodes) took\ + [expr {$t2-$t1}]+[expr {$t3-$t2}]ms, $nc candidates left" + } + return $tags +} + +# Return the list of IDs that have heads that are descendents of id, +# including id itself if it has a head. +proc descheads {id} { + global arcnos arcstart arcids archeads idheads cached_dheads + global allparents + + if {![info exists allparents($id)]} { + return {} + } + set aret {} + if {[llength $arcnos($id)] == 1 && [llength $allparents($id)] == 1} { + # part-way along an arc; check it first + set a [lindex $arcnos($id) 0] + if {$archeads($a) ne {}} { + validate_archeads $a + set i [lsearch -exact $arcids($a) $id] + foreach t $archeads($a) { + set j [lsearch -exact $arcids($a) $t] + if {$j > $i} break + lappend aret $t + } + } + set id $arcstart($a) + } + set origid $id + set todo [list $id] + set seen($id) 1 + set ret {} + for {set i 0} {$i < [llength $todo]} {incr i} { + set id [lindex $todo $i] + if {[info exists cached_dheads($id)]} { + set ret [concat $ret $cached_dheads($id)] + } else { + if {[info exists idheads($id)]} { + lappend ret $id + } + foreach a $arcnos($id) { + if {$archeads($a) ne {}} { + validate_archeads $a + if {$archeads($a) ne {}} { + set ret [concat $ret $archeads($a)] + } + } + set d $arcstart($a) + if {![info exists seen($d)]} { + lappend todo $d + set seen($d) 1 + } + } + } + } + set ret [lsort -unique $ret] + set cached_dheads($origid) $ret + return [concat $ret $aret] +} + +proc addedtag {id} { + global arcnos arcout cached_dtags cached_atags + + if {![info exists arcnos($id)]} return + if {![info exists arcout($id)]} { + recalcarc [lindex $arcnos($id) 0] + } + catch {unset cached_dtags} + catch {unset cached_atags} +} + +proc addedhead {hid head} { + global arcnos arcout cached_dheads + + if {![info exists arcnos($hid)]} return + if {![info exists arcout($hid)]} { + recalcarc [lindex $arcnos($hid) 0] + } + catch {unset cached_dheads} +} + +proc removedhead {hid head} { + global cached_dheads + + catch {unset cached_dheads} +} + +proc movedhead {hid head} { + global arcnos arcout cached_dheads + + if {![info exists arcnos($hid)]} return + if {![info exists arcout($hid)]} { + recalcarc [lindex $arcnos($hid) 0] + } + catch {unset cached_dheads} +} + +proc changedrefs {} { + global cached_dheads cached_dtags cached_atags + global arctags archeads arcnos arcout idheads idtags + + foreach id [concat [array names idheads] [array names idtags]] { + if {[info exists arcnos($id)] && ![info exists arcout($id)]} { + set a [lindex $arcnos($id) 0] + if {![info exists donearc($a)]} { + recalcarc $a + set donearc($a) 1 + } + } + } + catch {unset cached_dtags} + catch {unset cached_atags} + catch {unset cached_dheads} +} + +proc rereadrefs {} { + global idtags idheads idotherrefs mainheadid + + set refids [concat [array names idtags] \ + [array names idheads] [array names idotherrefs]] + foreach id $refids { + if {![info exists ref($id)]} { + set ref($id) [listrefs $id] + } + } + set oldmainhead $mainheadid + readrefs + changedrefs + set refids [lsort -unique [concat $refids [array names idtags] \ + [array names idheads] [array names idotherrefs]]] + foreach id $refids { + set v [listrefs $id] + if {![info exists ref($id)] 
|| $ref($id) != $v} { + redrawtags $id + } + } + if {$oldmainhead ne $mainheadid} { + redrawtags $oldmainhead + redrawtags $mainheadid + } + run refill_reflist +} + +proc listrefs {id} { + global idtags idheads idotherrefs + + set x {} + if {[info exists idtags($id)]} { + set x $idtags($id) + } + set y {} + if {[info exists idheads($id)]} { + set y $idheads($id) + } + set z {} + if {[info exists idotherrefs($id)]} { + set z $idotherrefs($id) + } + return [list $x $y $z] +} + +proc showtag {tag isnew} { + global ctext tagcontents tagids linknum tagobjid + + if {$isnew} { + addtohistory [list showtag $tag 0] savectextpos + } + $ctext conf -state normal + clear_ctext + settabs 0 + set linknum 0 + if {![info exists tagcontents($tag)]} { + catch { + set tagcontents($tag) [exec git cat-file tag $tag] + } + } + if {[info exists tagcontents($tag)]} { + set text $tagcontents($tag) + } else { + set text "[mc "Tag"]: $tag\n[mc "Id"]: $tagids($tag)" + } + appendwithlinks $text {} + maybe_scroll_ctext 1 + $ctext conf -state disabled + init_flist {} +} + +proc doquit {} { + global stopped + global gitktmpdir + + set stopped 100 + savestuff . + destroy . + + if {[info exists gitktmpdir]} { + catch {file delete -force $gitktmpdir} + } +} + +proc mkfontdisp {font top which} { + global fontattr fontpref $font NS use_ttk + + set fontpref($font) [set $font] + ${NS}::button $top.${font}but -text $which \ + -command [list choosefont $font $which] + ${NS}::label $top.$font -relief flat -font $font \ + -text $fontattr($font,family) -justify left + grid x $top.${font}but $top.$font -sticky w +} + +proc choosefont {font which} { + global fontparam fontlist fonttop fontattr + global prefstop NS + + set fontparam(which) $which + set fontparam(font) $font + set fontparam(family) [font actual $font -family] + set fontparam(size) $fontattr($font,size) + set fontparam(weight) $fontattr($font,weight) + set fontparam(slant) $fontattr($font,slant) + set top .gitkfont + set fonttop $top + if {![winfo exists $top]} { + font create sample + eval font config sample [font actual $font] + ttk_toplevel $top + make_transient $top $prefstop + wm title $top [mc "Gitk font chooser"] + ${NS}::label $top.l -textvariable fontparam(which) + pack $top.l -side top + set fontlist [lsort [font families]] + ${NS}::frame $top.f + listbox $top.f.fam -listvariable fontlist \ + -yscrollcommand [list $top.f.sb set] + bind $top.f.fam <> selfontfam + ${NS}::scrollbar $top.f.sb -command [list $top.f.fam yview] + pack $top.f.sb -side right -fill y + pack $top.f.fam -side left -fill both -expand 1 + pack $top.f -side top -fill both -expand 1 + ${NS}::frame $top.g + spinbox $top.g.size -from 4 -to 40 -width 4 \ + -textvariable fontparam(size) \ + -validatecommand {string is integer -strict %s} + checkbutton $top.g.bold -padx 5 \ + -font {{Times New Roman} 12 bold} -text [mc "B"] -indicatoron 0 \ + -variable fontparam(weight) -onvalue bold -offvalue normal + checkbutton $top.g.ital -padx 5 \ + -font {{Times New Roman} 12 italic} -text [mc "I"] -indicatoron 0 \ + -variable fontparam(slant) -onvalue italic -offvalue roman + pack $top.g.size $top.g.bold $top.g.ital -side left + pack $top.g -side top + canvas $top.c -width 150 -height 50 -border 2 -relief sunk \ + -background white + $top.c create text 100 25 -anchor center -text $which -font sample \ + -fill black -tags text + bind $top.c [list centertext $top.c] + pack $top.c -side top -fill x + ${NS}::frame $top.buts + ${NS}::button $top.buts.ok -text [mc "OK"] -command fontok -default active + 
${NS}::button $top.buts.can -text [mc "Cancel"] -command fontcan -default normal + bind $top fontok + bind $top fontcan + grid $top.buts.ok $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + pack $top.buts -side bottom -fill x + trace add variable fontparam write chg_fontparam + } else { + raise $top + $top.c itemconf text -text $which + } + set i [lsearch -exact $fontlist $fontparam(family)] + if {$i >= 0} { + $top.f.fam selection set $i + $top.f.fam see $i + } +} + +proc centertext {w} { + $w coords text [expr {[winfo width $w] / 2}] [expr {[winfo height $w] / 2}] +} + +proc fontok {} { + global fontparam fontpref prefstop + + set f $fontparam(font) + set fontpref($f) [list $fontparam(family) $fontparam(size)] + if {$fontparam(weight) eq "bold"} { + lappend fontpref($f) "bold" + } + if {$fontparam(slant) eq "italic"} { + lappend fontpref($f) "italic" + } + set w $prefstop.$f + $w conf -text $fontparam(family) -font $fontpref($f) + + fontcan +} + +proc fontcan {} { + global fonttop fontparam + + if {[info exists fonttop]} { + catch {destroy $fonttop} + catch {font delete sample} + unset fonttop + unset fontparam + } +} + +if {[package vsatisfies [package provide Tk] 8.6]} { + # In Tk 8.6 we have a native font chooser dialog. Overwrite the above + # function to make use of it. + proc choosefont {font which} { + tk fontchooser configure -title $which -font $font \ + -command [list on_choosefont $font $which] + tk fontchooser show + } + proc on_choosefont {font which newfont} { + global fontparam + puts stderr "$font $newfont" + array set f [font actual $newfont] + set fontparam(which) $which + set fontparam(font) $font + set fontparam(family) $f(-family) + set fontparam(size) $f(-size) + set fontparam(weight) $f(-weight) + set fontparam(slant) $f(-slant) + fontok + } +} + +proc selfontfam {} { + global fonttop fontparam + + set i [$fonttop.f.fam curselection] + if {$i ne {}} { + set fontparam(family) [$fonttop.f.fam get $i] + } +} + +proc chg_fontparam {v sub op} { + global fontparam + + font config sample -$sub $fontparam($sub) +} + +proc doprefs {} { + global maxwidth maxgraphpct use_ttk NS + global oldprefs prefstop showneartags showlocalchanges + global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor + global tabstop limitdiffs autoselect autosellen extdifftool perfile_attrs + global hideremotes want_ttk have_ttk + + set top .gitkprefs + set prefstop $top + if {[winfo exists $top]} { + raise $top + return + } + foreach v {maxwidth maxgraphpct showneartags showlocalchanges \ + limitdiffs tabstop perfile_attrs hideremotes want_ttk} { + set oldprefs($v) [set $v] + } + ttk_toplevel $top + wm title $top [mc "Gitk preferences"] + make_transient $top . 
+ ${NS}::label $top.ldisp -text [mc "Commit list display options"] + grid $top.ldisp - -sticky w -pady 10 + ${NS}::label $top.spacer -text " " + ${NS}::label $top.maxwidthl -text [mc "Maximum graph width (lines)"] + spinbox $top.maxwidth -from 0 -to 100 -width 4 -textvariable maxwidth + grid $top.spacer $top.maxwidthl $top.maxwidth -sticky w + ${NS}::label $top.maxpctl -text [mc "Maximum graph width (% of pane)"] + spinbox $top.maxpct -from 1 -to 100 -width 4 -textvariable maxgraphpct + grid x $top.maxpctl $top.maxpct -sticky w + ${NS}::checkbutton $top.showlocal -text [mc "Show local changes"] \ + -variable showlocalchanges + grid x $top.showlocal -sticky w + ${NS}::checkbutton $top.autoselect -text [mc "Auto-select SHA1 (length)"] \ + -variable autoselect + spinbox $top.autosellen -from 1 -to 40 -width 4 -textvariable autosellen + grid x $top.autoselect $top.autosellen -sticky w + ${NS}::checkbutton $top.hideremotes -text [mc "Hide remote refs"] \ + -variable hideremotes + grid x $top.hideremotes -sticky w + + ${NS}::label $top.ddisp -text [mc "Diff display options"] + grid $top.ddisp - -sticky w -pady 10 + ${NS}::label $top.tabstopl -text [mc "Tab spacing"] + spinbox $top.tabstop -from 1 -to 20 -width 4 -textvariable tabstop + grid x $top.tabstopl $top.tabstop -sticky w + ${NS}::checkbutton $top.ntag -text [mc "Display nearby tags"] \ + -variable showneartags + grid x $top.ntag -sticky w + ${NS}::checkbutton $top.ldiff -text [mc "Limit diffs to listed paths"] \ + -variable limitdiffs + grid x $top.ldiff -sticky w + ${NS}::checkbutton $top.lattr -text [mc "Support per-file encodings"] \ + -variable perfile_attrs + grid x $top.lattr -sticky w + + ${NS}::entry $top.extdifft -textvariable extdifftool + ${NS}::frame $top.extdifff + ${NS}::label $top.extdifff.l -text [mc "External diff tool" ] + ${NS}::button $top.extdifff.b -text [mc "Choose..."] -command choose_extdiff + pack $top.extdifff.l $top.extdifff.b -side left + pack configure $top.extdifff.l -padx 10 + grid x $top.extdifff $top.extdifft -sticky ew + + ${NS}::label $top.lgen -text [mc "General options"] + grid $top.lgen - -sticky w -pady 10 + ${NS}::checkbutton $top.want_ttk -variable want_ttk \ + -text [mc "Use themed widgets"] + if {$have_ttk} { + ${NS}::label $top.ttk_note -text [mc "(change requires restart)"] + } else { + ${NS}::label $top.ttk_note -text [mc "(currently unavailable)"] + } + grid x $top.want_ttk $top.ttk_note -sticky w + + ${NS}::label $top.cdisp -text [mc "Colors: press to choose"] + grid $top.cdisp - -sticky w -pady 10 + label $top.ui -padx 40 -relief sunk -background $uicolor + ${NS}::button $top.uibut -text [mc "Interface"] \ + -command [list choosecolor uicolor {} $top.ui [mc "interface"] setui] + grid x $top.uibut $top.ui -sticky w + label $top.bg -padx 40 -relief sunk -background $bgcolor + ${NS}::button $top.bgbut -text [mc "Background"] \ + -command [list choosecolor bgcolor {} $top.bg [mc "background"] setbg] + grid x $top.bgbut $top.bg -sticky w + label $top.fg -padx 40 -relief sunk -background $fgcolor + ${NS}::button $top.fgbut -text [mc "Foreground"] \ + -command [list choosecolor fgcolor {} $top.fg [mc "foreground"] setfg] + grid x $top.fgbut $top.fg -sticky w + label $top.diffold -padx 40 -relief sunk -background [lindex $diffcolors 0] + ${NS}::button $top.diffoldbut -text [mc "Diff: old lines"] \ + -command [list choosecolor diffcolors 0 $top.diffold [mc "diff old lines"] \ + [list $ctext tag conf d0 -foreground]] + grid x $top.diffoldbut $top.diffold -sticky w + label $top.diffnew -padx 40 
-relief sunk -background [lindex $diffcolors 1] + ${NS}::button $top.diffnewbut -text [mc "Diff: new lines"] \ + -command [list choosecolor diffcolors 1 $top.diffnew [mc "diff new lines"] \ + [list $ctext tag conf dresult -foreground]] + grid x $top.diffnewbut $top.diffnew -sticky w + label $top.hunksep -padx 40 -relief sunk -background [lindex $diffcolors 2] + ${NS}::button $top.hunksepbut -text [mc "Diff: hunk header"] \ + -command [list choosecolor diffcolors 2 $top.hunksep \ + [mc "diff hunk header"] \ + [list $ctext tag conf hunksep -foreground]] + grid x $top.hunksepbut $top.hunksep -sticky w + label $top.markbgsep -padx 40 -relief sunk -background $markbgcolor + ${NS}::button $top.markbgbut -text [mc "Marked line bg"] \ + -command [list choosecolor markbgcolor {} $top.markbgsep \ + [mc "marked line background"] \ + [list $ctext tag conf omark -background]] + grid x $top.markbgbut $top.markbgsep -sticky w + label $top.selbgsep -padx 40 -relief sunk -background $selectbgcolor + ${NS}::button $top.selbgbut -text [mc "Select bg"] \ + -command [list choosecolor selectbgcolor {} $top.selbgsep [mc "background"] setselbg] + grid x $top.selbgbut $top.selbgsep -sticky w + + ${NS}::label $top.cfont -text [mc "Fonts: press to choose"] + grid $top.cfont - -sticky w -pady 10 + mkfontdisp mainfont $top [mc "Main font"] + mkfontdisp textfont $top [mc "Diff display font"] + mkfontdisp uifont $top [mc "User interface font"] + + ${NS}::frame $top.buts + ${NS}::button $top.buts.ok -text [mc "OK"] -command prefsok -default active + ${NS}::button $top.buts.can -text [mc "Cancel"] -command prefscan -default normal + bind $top prefsok + bind $top prefscan + grid $top.buts.ok $top.buts.can + grid columnconfigure $top.buts 0 -weight 1 -uniform a + grid columnconfigure $top.buts 1 -weight 1 -uniform a + grid $top.buts - - -pady 10 -sticky ew + grid columnconfigure $top 2 -weight 1 + bind $top "focus $top.buts.ok" +} + +proc choose_extdiff {} { + global extdifftool + + set prog [tk_getOpenFile -title [mc "External diff tool"] -multiple false] + if {$prog ne {}} { + set extdifftool $prog + } +} + +proc choosecolor {v vi w x cmd} { + global $v + + set c [tk_chooseColor -initialcolor [lindex [set $v] $vi] \ + -title [mc "Gitk: choose color for %s" $x]] + if {$c eq {}} return + $w conf -background $c + lset $v $vi $c + eval $cmd $c +} + +proc setselbg {c} { + global bglist cflist + foreach w $bglist { + $w configure -selectbackground $c + } + $cflist tag configure highlight \ + -background [$cflist cget -selectbackground] + allcanvs itemconf secsel -fill $c +} + +# This sets the background color and the color scheme for the whole UI. +# For some reason, tk_setPalette chooses a nasty dark red for selectColor +# if we don't specify one ourselves, which makes the checkbuttons and +# radiobuttons look bad. This chooses white for selectColor if the +# background color is light, or black if it is dark. +proc setui {c} { + if {[tk windowingsystem] eq "win32"} { return } + set bg [winfo rgb . 
$c] + set selc black + if {[lindex $bg 0] + 1.5 * [lindex $bg 1] + 0.5 * [lindex $bg 2] > 100000} { + set selc white + } + tk_setPalette background $c selectColor $selc +} + +proc setbg {c} { + global bglist + + foreach w $bglist { + $w conf -background $c + } +} + +proc setfg {c} { + global fglist canv + + foreach w $fglist { + $w conf -foreground $c + } + allcanvs itemconf text -fill $c + $canv itemconf circle -outline $c + $canv itemconf markid -outline $c +} + +proc prefscan {} { + global oldprefs prefstop + + foreach v {maxwidth maxgraphpct showneartags showlocalchanges \ + limitdiffs tabstop perfile_attrs hideremotes want_ttk} { + global $v + set $v $oldprefs($v) + } + catch {destroy $prefstop} + unset prefstop + fontcan +} + +proc prefsok {} { + global maxwidth maxgraphpct + global oldprefs prefstop showneartags showlocalchanges + global fontpref mainfont textfont uifont + global limitdiffs treediffs perfile_attrs + global hideremotes + + catch {destroy $prefstop} + unset prefstop + fontcan + set fontchanged 0 + if {$mainfont ne $fontpref(mainfont)} { + set mainfont $fontpref(mainfont) + parsefont mainfont $mainfont + eval font configure mainfont [fontflags mainfont] + eval font configure mainfontbold [fontflags mainfont 1] + setcoords + set fontchanged 1 + } + if {$textfont ne $fontpref(textfont)} { + set textfont $fontpref(textfont) + parsefont textfont $textfont + eval font configure textfont [fontflags textfont] + eval font configure textfontbold [fontflags textfont 1] + } + if {$uifont ne $fontpref(uifont)} { + set uifont $fontpref(uifont) + parsefont uifont $uifont + eval font configure uifont [fontflags uifont] + } + settabs + if {$showlocalchanges != $oldprefs(showlocalchanges)} { + if {$showlocalchanges} { + doshowlocalchanges + } else { + dohidelocalchanges + } + } + if {$limitdiffs != $oldprefs(limitdiffs) || + ($perfile_attrs && !$oldprefs(perfile_attrs))} { + # treediffs elements are limited by path; + # won't have encodings cached if perfile_attrs was just turned on + catch {unset treediffs} + } + if {$fontchanged || $maxwidth != $oldprefs(maxwidth) + || $maxgraphpct != $oldprefs(maxgraphpct)} { + redisplay + } elseif {$showneartags != $oldprefs(showneartags) || + $limitdiffs != $oldprefs(limitdiffs)} { + reselectline + } + if {$hideremotes != $oldprefs(hideremotes)} { + rereadrefs + } +} + +proc formatdate {d} { + global datetimeformat + if {$d ne {}} { + set d [clock format $d -format $datetimeformat] + } + return $d +} + +# This list of encoding names and aliases is distilled from +# http://www.iana.org/assignments/character-sets. +# Not all of them are supported by Tcl. 
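+# For example, a commit encoding of "latin1" is not known to Tcl directly:
+# tcl_encoding below first tries the name as given, then tries collapsing
+# an iso/cp/ibm/jis prefix ("iso-8859-1" becomes "iso8859-1"), and finally
+# walks these alias groups, so "latin1" ends up resolved through its group
+# to Tcl's iso8859-1.  A name that matches nothing makes tcl_encoding
+# return an empty string, and the callers keep their existing default.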
+set encoding_aliases { + { ANSI_X3.4-1968 iso-ir-6 ANSI_X3.4-1986 ISO_646.irv:1991 ASCII + ISO646-US US-ASCII us IBM367 cp367 csASCII } + { ISO-10646-UTF-1 csISO10646UTF1 } + { ISO_646.basic:1983 ref csISO646basic1983 } + { INVARIANT csINVARIANT } + { ISO_646.irv:1983 iso-ir-2 irv csISO2IntlRefVersion } + { BS_4730 iso-ir-4 ISO646-GB gb uk csISO4UnitedKingdom } + { NATS-SEFI iso-ir-8-1 csNATSSEFI } + { NATS-SEFI-ADD iso-ir-8-2 csNATSSEFIADD } + { NATS-DANO iso-ir-9-1 csNATSDANO } + { NATS-DANO-ADD iso-ir-9-2 csNATSDANOADD } + { SEN_850200_B iso-ir-10 FI ISO646-FI ISO646-SE se csISO10Swedish } + { SEN_850200_C iso-ir-11 ISO646-SE2 se2 csISO11SwedishForNames } + { KS_C_5601-1987 iso-ir-149 KS_C_5601-1989 KSC_5601 korean csKSC56011987 } + { ISO-2022-KR csISO2022KR } + { EUC-KR csEUCKR } + { ISO-2022-JP csISO2022JP } + { ISO-2022-JP-2 csISO2022JP2 } + { JIS_C6220-1969-jp JIS_C6220-1969 iso-ir-13 katakana x0201-7 + csISO13JISC6220jp } + { JIS_C6220-1969-ro iso-ir-14 jp ISO646-JP csISO14JISC6220ro } + { IT iso-ir-15 ISO646-IT csISO15Italian } + { PT iso-ir-16 ISO646-PT csISO16Portuguese } + { ES iso-ir-17 ISO646-ES csISO17Spanish } + { greek7-old iso-ir-18 csISO18Greek7Old } + { latin-greek iso-ir-19 csISO19LatinGreek } + { DIN_66003 iso-ir-21 de ISO646-DE csISO21German } + { NF_Z_62-010_(1973) iso-ir-25 ISO646-FR1 csISO25French } + { Latin-greek-1 iso-ir-27 csISO27LatinGreek1 } + { ISO_5427 iso-ir-37 csISO5427Cyrillic } + { JIS_C6226-1978 iso-ir-42 csISO42JISC62261978 } + { BS_viewdata iso-ir-47 csISO47BSViewdata } + { INIS iso-ir-49 csISO49INIS } + { INIS-8 iso-ir-50 csISO50INIS8 } + { INIS-cyrillic iso-ir-51 csISO51INISCyrillic } + { ISO_5427:1981 iso-ir-54 ISO5427Cyrillic1981 } + { ISO_5428:1980 iso-ir-55 csISO5428Greek } + { GB_1988-80 iso-ir-57 cn ISO646-CN csISO57GB1988 } + { GB_2312-80 iso-ir-58 chinese csISO58GB231280 } + { NS_4551-1 iso-ir-60 ISO646-NO no csISO60DanishNorwegian + csISO60Norwegian1 } + { NS_4551-2 ISO646-NO2 iso-ir-61 no2 csISO61Norwegian2 } + { NF_Z_62-010 iso-ir-69 ISO646-FR fr csISO69French } + { videotex-suppl iso-ir-70 csISO70VideotexSupp1 } + { PT2 iso-ir-84 ISO646-PT2 csISO84Portuguese2 } + { ES2 iso-ir-85 ISO646-ES2 csISO85Spanish2 } + { MSZ_7795.3 iso-ir-86 ISO646-HU hu csISO86Hungarian } + { JIS_C6226-1983 iso-ir-87 x0208 JIS_X0208-1983 csISO87JISX0208 } + { greek7 iso-ir-88 csISO88Greek7 } + { ASMO_449 ISO_9036 arabic7 iso-ir-89 csISO89ASMO449 } + { iso-ir-90 csISO90 } + { JIS_C6229-1984-a iso-ir-91 jp-ocr-a csISO91JISC62291984a } + { JIS_C6229-1984-b iso-ir-92 ISO646-JP-OCR-B jp-ocr-b + csISO92JISC62991984b } + { JIS_C6229-1984-b-add iso-ir-93 jp-ocr-b-add csISO93JIS62291984badd } + { JIS_C6229-1984-hand iso-ir-94 jp-ocr-hand csISO94JIS62291984hand } + { JIS_C6229-1984-hand-add iso-ir-95 jp-ocr-hand-add + csISO95JIS62291984handadd } + { JIS_C6229-1984-kana iso-ir-96 csISO96JISC62291984kana } + { ISO_2033-1983 iso-ir-98 e13b csISO2033 } + { ANSI_X3.110-1983 iso-ir-99 CSA_T500-1983 NAPLPS csISO99NAPLPS } + { ISO_8859-1:1987 iso-ir-100 ISO_8859-1 ISO-8859-1 latin1 l1 IBM819 + CP819 csISOLatin1 } + { ISO_8859-2:1987 iso-ir-101 ISO_8859-2 ISO-8859-2 latin2 l2 csISOLatin2 } + { T.61-7bit iso-ir-102 csISO102T617bit } + { T.61-8bit T.61 iso-ir-103 csISO103T618bit } + { ISO_8859-3:1988 iso-ir-109 ISO_8859-3 ISO-8859-3 latin3 l3 csISOLatin3 } + { ISO_8859-4:1988 iso-ir-110 ISO_8859-4 ISO-8859-4 latin4 l4 csISOLatin4 } + { ECMA-cyrillic iso-ir-111 KOI8-E csISO111ECMACyrillic } + { CSA_Z243.4-1985-1 iso-ir-121 ISO646-CA csa7-1 ca csISO121Canadian1 } + { 
CSA_Z243.4-1985-2 iso-ir-122 ISO646-CA2 csa7-2 csISO122Canadian2 } + { CSA_Z243.4-1985-gr iso-ir-123 csISO123CSAZ24341985gr } + { ISO_8859-6:1987 iso-ir-127 ISO_8859-6 ISO-8859-6 ECMA-114 ASMO-708 + arabic csISOLatinArabic } + { ISO_8859-6-E csISO88596E ISO-8859-6-E } + { ISO_8859-6-I csISO88596I ISO-8859-6-I } + { ISO_8859-7:1987 iso-ir-126 ISO_8859-7 ISO-8859-7 ELOT_928 ECMA-118 + greek greek8 csISOLatinGreek } + { T.101-G2 iso-ir-128 csISO128T101G2 } + { ISO_8859-8:1988 iso-ir-138 ISO_8859-8 ISO-8859-8 hebrew + csISOLatinHebrew } + { ISO_8859-8-E csISO88598E ISO-8859-8-E } + { ISO_8859-8-I csISO88598I ISO-8859-8-I } + { CSN_369103 iso-ir-139 csISO139CSN369103 } + { JUS_I.B1.002 iso-ir-141 ISO646-YU js yu csISO141JUSIB1002 } + { ISO_6937-2-add iso-ir-142 csISOTextComm } + { IEC_P27-1 iso-ir-143 csISO143IECP271 } + { ISO_8859-5:1988 iso-ir-144 ISO_8859-5 ISO-8859-5 cyrillic + csISOLatinCyrillic } + { JUS_I.B1.003-serb iso-ir-146 serbian csISO146Serbian } + { JUS_I.B1.003-mac macedonian iso-ir-147 csISO147Macedonian } + { ISO_8859-9:1989 iso-ir-148 ISO_8859-9 ISO-8859-9 latin5 l5 csISOLatin5 } + { greek-ccitt iso-ir-150 csISO150 csISO150GreekCCITT } + { NC_NC00-10:81 cuba iso-ir-151 ISO646-CU csISO151Cuba } + { ISO_6937-2-25 iso-ir-152 csISO6937Add } + { GOST_19768-74 ST_SEV_358-88 iso-ir-153 csISO153GOST1976874 } + { ISO_8859-supp iso-ir-154 latin1-2-5 csISO8859Supp } + { ISO_10367-box iso-ir-155 csISO10367Box } + { ISO-8859-10 iso-ir-157 l6 ISO_8859-10:1992 csISOLatin6 latin6 } + { latin-lap lap iso-ir-158 csISO158Lap } + { JIS_X0212-1990 x0212 iso-ir-159 csISO159JISX02121990 } + { DS_2089 DS2089 ISO646-DK dk csISO646Danish } + { us-dk csUSDK } + { dk-us csDKUS } + { JIS_X0201 X0201 csHalfWidthKatakana } + { KSC5636 ISO646-KR csKSC5636 } + { ISO-10646-UCS-2 csUnicode } + { ISO-10646-UCS-4 csUCS4 } + { DEC-MCS dec csDECMCS } + { hp-roman8 roman8 r8 csHPRoman8 } + { macintosh mac csMacintosh } + { IBM037 cp037 ebcdic-cp-us ebcdic-cp-ca ebcdic-cp-wt ebcdic-cp-nl + csIBM037 } + { IBM038 EBCDIC-INT cp038 csIBM038 } + { IBM273 CP273 csIBM273 } + { IBM274 EBCDIC-BE CP274 csIBM274 } + { IBM275 EBCDIC-BR cp275 csIBM275 } + { IBM277 EBCDIC-CP-DK EBCDIC-CP-NO csIBM277 } + { IBM278 CP278 ebcdic-cp-fi ebcdic-cp-se csIBM278 } + { IBM280 CP280 ebcdic-cp-it csIBM280 } + { IBM281 EBCDIC-JP-E cp281 csIBM281 } + { IBM284 CP284 ebcdic-cp-es csIBM284 } + { IBM285 CP285 ebcdic-cp-gb csIBM285 } + { IBM290 cp290 EBCDIC-JP-kana csIBM290 } + { IBM297 cp297 ebcdic-cp-fr csIBM297 } + { IBM420 cp420 ebcdic-cp-ar1 csIBM420 } + { IBM423 cp423 ebcdic-cp-gr csIBM423 } + { IBM424 cp424 ebcdic-cp-he csIBM424 } + { IBM437 cp437 437 csPC8CodePage437 } + { IBM500 CP500 ebcdic-cp-be ebcdic-cp-ch csIBM500 } + { IBM775 cp775 csPC775Baltic } + { IBM850 cp850 850 csPC850Multilingual } + { IBM851 cp851 851 csIBM851 } + { IBM852 cp852 852 csPCp852 } + { IBM855 cp855 855 csIBM855 } + { IBM857 cp857 857 csIBM857 } + { IBM860 cp860 860 csIBM860 } + { IBM861 cp861 861 cp-is csIBM861 } + { IBM862 cp862 862 csPC862LatinHebrew } + { IBM863 cp863 863 csIBM863 } + { IBM864 cp864 csIBM864 } + { IBM865 cp865 865 csIBM865 } + { IBM866 cp866 866 csIBM866 } + { IBM868 CP868 cp-ar csIBM868 } + { IBM869 cp869 869 cp-gr csIBM869 } + { IBM870 CP870 ebcdic-cp-roece ebcdic-cp-yu csIBM870 } + { IBM871 CP871 ebcdic-cp-is csIBM871 } + { IBM880 cp880 EBCDIC-Cyrillic csIBM880 } + { IBM891 cp891 csIBM891 } + { IBM903 cp903 csIBM903 } + { IBM904 cp904 904 csIBBM904 } + { IBM905 CP905 ebcdic-cp-tr csIBM905 } + { IBM918 CP918 ebcdic-cp-ar2 csIBM918 } + { 
IBM1026 CP1026 csIBM1026 } + { EBCDIC-AT-DE csIBMEBCDICATDE } + { EBCDIC-AT-DE-A csEBCDICATDEA } + { EBCDIC-CA-FR csEBCDICCAFR } + { EBCDIC-DK-NO csEBCDICDKNO } + { EBCDIC-DK-NO-A csEBCDICDKNOA } + { EBCDIC-FI-SE csEBCDICFISE } + { EBCDIC-FI-SE-A csEBCDICFISEA } + { EBCDIC-FR csEBCDICFR } + { EBCDIC-IT csEBCDICIT } + { EBCDIC-PT csEBCDICPT } + { EBCDIC-ES csEBCDICES } + { EBCDIC-ES-A csEBCDICESA } + { EBCDIC-ES-S csEBCDICESS } + { EBCDIC-UK csEBCDICUK } + { EBCDIC-US csEBCDICUS } + { UNKNOWN-8BIT csUnknown8BiT } + { MNEMONIC csMnemonic } + { MNEM csMnem } + { VISCII csVISCII } + { VIQR csVIQR } + { KOI8-R csKOI8R } + { IBM00858 CCSID00858 CP00858 PC-Multilingual-850+euro } + { IBM00924 CCSID00924 CP00924 ebcdic-Latin9--euro } + { IBM01140 CCSID01140 CP01140 ebcdic-us-37+euro } + { IBM01141 CCSID01141 CP01141 ebcdic-de-273+euro } + { IBM01142 CCSID01142 CP01142 ebcdic-dk-277+euro ebcdic-no-277+euro } + { IBM01143 CCSID01143 CP01143 ebcdic-fi-278+euro ebcdic-se-278+euro } + { IBM01144 CCSID01144 CP01144 ebcdic-it-280+euro } + { IBM01145 CCSID01145 CP01145 ebcdic-es-284+euro } + { IBM01146 CCSID01146 CP01146 ebcdic-gb-285+euro } + { IBM01147 CCSID01147 CP01147 ebcdic-fr-297+euro } + { IBM01148 CCSID01148 CP01148 ebcdic-international-500+euro } + { IBM01149 CCSID01149 CP01149 ebcdic-is-871+euro } + { IBM1047 IBM-1047 } + { PTCP154 csPTCP154 PT154 CP154 Cyrillic-Asian } + { Amiga-1251 Ami1251 Amiga1251 Ami-1251 } + { UNICODE-1-1 csUnicode11 } + { CESU-8 csCESU-8 } + { BOCU-1 csBOCU-1 } + { UNICODE-1-1-UTF-7 csUnicode11UTF7 } + { ISO-8859-14 iso-ir-199 ISO_8859-14:1998 ISO_8859-14 latin8 iso-celtic + l8 } + { ISO-8859-15 ISO_8859-15 Latin-9 } + { ISO-8859-16 iso-ir-226 ISO_8859-16:2001 ISO_8859-16 latin10 l10 } + { GBK CP936 MS936 windows-936 } + { JIS_Encoding csJISEncoding } + { Shift_JIS MS_Kanji csShiftJIS ShiftJIS Shift-JIS } + { Extended_UNIX_Code_Packed_Format_for_Japanese csEUCPkdFmtJapanese + EUC-JP } + { Extended_UNIX_Code_Fixed_Width_for_Japanese csEUCFixWidJapanese } + { ISO-10646-UCS-Basic csUnicodeASCII } + { ISO-10646-Unicode-Latin1 csUnicodeLatin1 ISO-10646 } + { ISO-Unicode-IBM-1261 csUnicodeIBM1261 } + { ISO-Unicode-IBM-1268 csUnicodeIBM1268 } + { ISO-Unicode-IBM-1276 csUnicodeIBM1276 } + { ISO-Unicode-IBM-1264 csUnicodeIBM1264 } + { ISO-Unicode-IBM-1265 csUnicodeIBM1265 } + { ISO-8859-1-Windows-3.0-Latin-1 csWindows30Latin1 } + { ISO-8859-1-Windows-3.1-Latin-1 csWindows31Latin1 } + { ISO-8859-2-Windows-Latin-2 csWindows31Latin2 } + { ISO-8859-9-Windows-Latin-5 csWindows31Latin5 } + { Adobe-Standard-Encoding csAdobeStandardEncoding } + { Ventura-US csVenturaUS } + { Ventura-International csVenturaInternational } + { PC8-Danish-Norwegian csPC8DanishNorwegian } + { PC8-Turkish csPC8Turkish } + { IBM-Symbols csIBMSymbols } + { IBM-Thai csIBMThai } + { HP-Legal csHPLegal } + { HP-Pi-font csHPPiFont } + { HP-Math8 csHPMath8 } + { Adobe-Symbol-Encoding csHPPSMath } + { HP-DeskTop csHPDesktop } + { Ventura-Math csVenturaMath } + { Microsoft-Publishing csMicrosoftPublishing } + { Windows-31J csWindows31J } + { GB2312 csGB2312 } + { Big5 csBig5 } +} + +proc tcl_encoding {enc} { + global encoding_aliases tcl_encoding_cache + if {[info exists tcl_encoding_cache($enc)]} { + return $tcl_encoding_cache($enc) + } + set names [encoding names] + set lcnames [string tolower $names] + set enc [string tolower $enc] + set i [lsearch -exact $lcnames $enc] + if {$i < 0} { + # look for "isonnn" instead of "iso-nnn" or "iso_nnn" + if {[regsub {^(iso|cp|ibm|jis)[-_]} $enc {\1} encx]} { + set i [lsearch 
-exact $lcnames $encx] + } + } + if {$i < 0} { + foreach l $encoding_aliases { + set ll [string tolower $l] + if {[lsearch -exact $ll $enc] < 0} continue + # look through the aliases for one that tcl knows about + foreach e $ll { + set i [lsearch -exact $lcnames $e] + if {$i < 0} { + if {[regsub {^(iso|cp|ibm|jis)[-_]} $e {\1} ex]} { + set i [lsearch -exact $lcnames $ex] + } + } + if {$i >= 0} break + } + break + } + } + set tclenc {} + if {$i >= 0} { + set tclenc [lindex $names $i] + } + set tcl_encoding_cache($enc) $tclenc + return $tclenc +} + +proc gitattr {path attr default} { + global path_attr_cache + if {[info exists path_attr_cache($attr,$path)]} { + set r $path_attr_cache($attr,$path) + } else { + set r "unspecified" + if {![catch {set line [exec git check-attr $attr -- $path]}]} { + regexp "(.*): $attr: (.*)" $line m f r + } + set path_attr_cache($attr,$path) $r + } + if {$r eq "unspecified"} { + return $default + } + return $r +} + +proc cache_gitattr {attr pathlist} { + global path_attr_cache + set newlist {} + foreach path $pathlist { + if {![info exists path_attr_cache($attr,$path)]} { + lappend newlist $path + } + } + set lim 1000 + if {[tk windowingsystem] == "win32"} { + # windows has a 32k limit on the arguments to a command... + set lim 30 + } + while {$newlist ne {}} { + set head [lrange $newlist 0 [expr {$lim - 1}]] + set newlist [lrange $newlist $lim end] + if {![catch {set rlist [eval exec git check-attr $attr -- $head]}]} { + foreach row [split $rlist "\n"] { + if {[regexp "(.*): $attr: (.*)" $row m path value]} { + if {[string index $path 0] eq "\""} { + set path [encoding convertfrom [lindex $path 0]] + } + set path_attr_cache($attr,$path) $value + } + } + } + } +} + +proc get_path_encoding {path} { + global gui_encoding perfile_attrs + set tcl_enc $gui_encoding + if {$path ne {} && $perfile_attrs} { + set enc2 [tcl_encoding [gitattr $path encoding $tcl_enc]] + if {$enc2 ne {}} { + set tcl_enc $enc2 + } + } + return $tcl_enc +} + +# First check that Tcl/Tk is recent enough +if {[catch {package require Tk 8.4} err]} { + show_error {} . "Sorry, gitk cannot run with this version of Tcl/Tk.\n\ + Gitk requires at least Tcl/Tk 8.4." list + exit 1 +} + +# defaults... 
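+# These are only the built-in defaults: the "catch {source ~/.gitk}" further
+# down reloads whatever settings were last written back to ~/.gitk (gitk
+# saves them itself on exit via savestuff), so any of these variables can be
+# overridden there with plain Tcl, for instance (values purely illustrative):
+#   set maxwidth 24
+#   set textfont {Monaco 10}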
+set wrcomcmd "git diff-tree --stdin -p --pretty" + +set gitencoding {} +catch { + set gitencoding [exec git config --get i18n.commitencoding] +} +catch { + set gitencoding [exec git config --get i18n.logoutputencoding] +} +if {$gitencoding == ""} { + set gitencoding "utf-8" +} +set tclencoding [tcl_encoding $gitencoding] +if {$tclencoding == {}} { + puts stderr "Warning: encoding $gitencoding is not supported by Tcl/Tk" +} + +set gui_encoding [encoding system] +catch { + set enc [exec git config --get gui.encoding] + if {$enc ne {}} { + set tclenc [tcl_encoding $enc] + if {$tclenc ne {}} { + set gui_encoding $tclenc + } else { + puts stderr "Warning: encoding $enc is not supported by Tcl/Tk" + } + } +} + +if {[tk windowingsystem] eq "aqua"} { + set mainfont {{Lucida Grande} 9} + set textfont {Monaco 9} + set uifont {{Lucida Grande} 9 bold} +} else { + set mainfont {Helvetica 9} + set textfont {Courier 9} + set uifont {Helvetica 9 bold} +} +set tabstop 8 +set findmergefiles 0 +set maxgraphpct 50 +set maxwidth 16 +set revlistorder 0 +set fastdate 0 +set uparrowlen 5 +set downarrowlen 5 +set mingaplen 100 +set cmitmode "patch" +set wrapcomment "none" +set showneartags 1 +set hideremotes 0 +set maxrefs 20 +set maxlinelen 200 +set showlocalchanges 1 +set limitdiffs 1 +set datetimeformat "%Y-%m-%d %H:%M:%S" +set autoselect 1 +set autosellen 40 +set perfile_attrs 0 +set want_ttk 1 + +if {[tk windowingsystem] eq "aqua"} { + set extdifftool "opendiff" +} else { + set extdifftool "meld" +} + +set colors {green red blue magenta darkgrey brown orange} +if {[tk windowingsystem] eq "win32"} { + set uicolor SystemButtonFace + set bgcolor SystemWindow + set fgcolor SystemButtonText + set selectbgcolor SystemHighlight +} else { + set uicolor grey85 + set bgcolor white + set fgcolor black + set selectbgcolor gray85 +} +set diffcolors {red "#00a000" blue} +set diffcontext 3 +set ignorespace 0 +set worddiff "" +set markbgcolor "#e0e0ff" + +set circlecolors {white blue gray blue blue} + +# button for popping up context menus +if {[tk windowingsystem] eq "aqua"} { + set ctxbut +} else { + set ctxbut +} + +## For msgcat loading, first locate the installation location. +if { [info exists ::env(GITK_MSGSDIR)] } { + ## Msgsdir was manually set in the environment. + set gitk_msgsdir $::env(GITK_MSGSDIR) +} else { + ## Let's guess the prefix from argv0. + set gitk_prefix [file dirname [file dirname [file normalize $argv0]]] + set gitk_libdir [file join $gitk_prefix share gitk lib] + set gitk_msgsdir [file join $gitk_libdir msgs] + unset gitk_prefix +} + +## Internationalization (i18n) through msgcat and gettext. See +## http://www.gnu.org/software/gettext/manual/html_node/Tcl.html +package require msgcat +namespace import ::msgcat::mc +## And eventually load the actual message catalog +::msgcat::mcload $gitk_msgsdir + +catch {source ~/.gitk} + +parsefont mainfont $mainfont +eval font create mainfont [fontflags mainfont] +eval font create mainfontbold [fontflags mainfont 1] + +parsefont textfont $textfont +eval font create textfont [fontflags textfont] +eval font create textfontbold [fontflags textfont 1] + +parsefont uifont $uifont +eval font create uifont [fontflags uifont] + +setui $uicolor + +setoptions + +# check that we can find a .git directory somewhere... +if {[catch {set gitdir [gitdir]}]} { + show_error {} . [mc "Cannot find a git repository here."] + exit 1 +} +if {![file isdirectory $gitdir]} { + show_error {} . [mc "Cannot find the git directory \"%s\"." 
$gitdir] + exit 1 +} + +set selecthead {} +set selectheadid {} + +set revtreeargs {} +set cmdline_files {} +set i 0 +set revtreeargscmd {} +foreach arg $argv { + switch -glob -- $arg { + "" { } + "--" { + set cmdline_files [lrange $argv [expr {$i + 1}] end] + break + } + "--select-commit=*" { + set selecthead [string range $arg 16 end] + } + "--argscmd=*" { + set revtreeargscmd [string range $arg 10 end] + } + default { + lappend revtreeargs $arg + } + } + incr i +} + +if {$selecthead eq "HEAD"} { + set selecthead {} +} + +if {$i >= [llength $argv] && $revtreeargs ne {}} { + # no -- on command line, but some arguments (other than --argscmd) + if {[catch { + set f [eval exec git rev-parse --no-revs --no-flags $revtreeargs] + set cmdline_files [split $f "\n"] + set n [llength $cmdline_files] + set revtreeargs [lrange $revtreeargs 0 end-$n] + # Unfortunately git rev-parse doesn't produce an error when + # something is both a revision and a filename. To be consistent + # with git log and git rev-list, check revtreeargs for filenames. + foreach arg $revtreeargs { + if {[file exists $arg]} { + show_error {} . [mc "Ambiguous argument '%s': both revision\ + and filename" $arg] + exit 1 + } + } + } err]} { + # unfortunately we get both stdout and stderr in $err, + # so look for "fatal:". + set i [string first "fatal:" $err] + if {$i > 0} { + set err [string range $err [expr {$i + 6}] end] + } + show_error {} . "[mc "Bad arguments to gitk:"]\n$err" + exit 1 + } +} + +set nullid "0000000000000000000000000000000000000000" +set nullid2 "0000000000000000000000000000000000000001" +set nullfile "/dev/null" + +set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}] +if {![info exists have_ttk]} { + set have_ttk [llength [info commands ::ttk::style]] +} +set use_ttk [expr {$have_ttk && $want_ttk}] +set NS [expr {$use_ttk ? "ttk" : ""}] + +regexp {^git version ([\d.]*\d)} [exec git version] _ git_version + +set show_notes {} +if {[package vcompare $git_version "1.6.6.2"] >= 0} { + set show_notes "--show-notes" +} + +set runq {} +set history {} +set historyindex 0 +set fh_serial 0 +set nhl_names {} +set highlight_paths {} +set findpattern {} +set searchdirn -forwards +set boldids {} +set boldnameids {} +set diffelide {0 0} +set markingmatches 0 +set linkentercount 0 +set need_redisplay 0 +set nrows_drawn 0 +set firsttabstop 0 + +set nextviewnum 1 +set curview 0 +set selectedview 0 +set selectedhlview [mc "None"] +set highlight_related [mc "None"] +set highlight_files {} +set viewfiles(0) {} +set viewperm(0) 0 +set viewargs(0) {} +set viewargscmd(0) {} + +set selectedline {} +set numcommits 0 +set loginstance 0 +set cmdlineok 0 +set stopped 0 +set stuffsaved 0 +set patchnum 0 +set lserial 0 +set isworktree [expr {[exec git rev-parse --is-inside-work-tree] == "true"}] +setcoords +makewindow +catch { + image create photo gitlogo -width 16 -height 16 + + image create photo gitlogominus -width 4 -height 2 + gitlogominus put #C00000 -to 0 0 4 2 + gitlogo copy gitlogominus -to 1 5 + gitlogo copy gitlogominus -to 6 5 + gitlogo copy gitlogominus -to 11 5 + image delete gitlogominus + + image create photo gitlogoplus -width 4 -height 4 + gitlogoplus put #008000 -to 1 0 3 4 + gitlogoplus put #008000 -to 0 1 4 3 + gitlogo copy gitlogoplus -to 1 9 + gitlogo copy gitlogoplus -to 6 9 + gitlogo copy gitlogoplus -to 11 9 + image delete gitlogoplus + + image create photo gitlogo32 -width 32 -height 32 + gitlogo32 copy gitlogo -zoom 2 2 + + wm iconphoto . 
-default gitlogo gitlogo32 +} +# wait for the window to become visible +tkwait visibility . +wm title . "[file tail $argv0]: [file tail [pwd]]" +update +readrefs + +if {$cmdline_files ne {} || $revtreeargs ne {} || $revtreeargscmd ne {}} { + # create a view for the files/dirs specified on the command line + set curview 1 + set selectedview 1 + set nextviewnum 2 + set viewname(1) [mc "Command line"] + set viewfiles(1) $cmdline_files + set viewargs(1) $revtreeargs + set viewargscmd(1) $revtreeargscmd + set viewperm(1) 0 + set vdatemode(1) 0 + addviewmenu 1 + .bar.view entryconf [mca "Edit view..."] -state normal + .bar.view entryconf [mca "Delete view"] -state normal +} + +if {[info exists permviews]} { + foreach v $permviews { + set n $nextviewnum + incr nextviewnum + set viewname($n) [lindex $v 0] + set viewfiles($n) [lindex $v 1] + set viewargs($n) [lindex $v 2] + set viewargscmd($n) [lindex $v 3] + set viewperm($n) 1 + addviewmenu $n + } +} + +if {[tk windowingsystem] eq "win32"} { + focus -force . +} + +getcommits {} + +# Local variables: +# mode: tcl +# indent-tabs-mode: t +# tab-width: 8 +# End: diff --git a/SparkleShare/Mac/git/libexec/git-core/git b/SparkleShare/Mac/git/libexec/git-core/git new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-add b/SparkleShare/Mac/git/libexec/git-core/git-add new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-add @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-add--interactive b/SparkleShare/Mac/git/libexec/git-core/git-add--interactive new file mode 100755 index 00000000..57ec163b --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-add--interactive @@ -0,0 +1,1610 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); + +use 5.008; +use strict; +use warnings; +use Git; + +binmode(STDOUT, ":raw"); + +my $repo = Git->repository(); + +my $menu_use_color = $repo->get_colorbool('color.interactive'); +my ($prompt_color, $header_color, $help_color) = + $menu_use_color ? ( + $repo->get_color('color.interactive.prompt', 'bold blue'), + $repo->get_color('color.interactive.header', 'bold'), + $repo->get_color('color.interactive.help', 'red bold'), + ) : (); +my $error_color = (); +if ($menu_use_color) { + my $help_color_spec = ($repo->config('color.interactive.help') or + 'red bold'); + $error_color = $repo->get_color('color.interactive.error', + $help_color_spec); +} + +my $diff_use_color = $repo->get_colorbool('color.diff'); +my ($fraginfo_color) = + $diff_use_color ? ( + $repo->get_color('color.diff.frag', 'cyan'), + ) : (); +my ($diff_plain_color) = + $diff_use_color ? ( + $repo->get_color('color.diff.plain', ''), + ) : (); +my ($diff_old_color) = + $diff_use_color ? ( + $repo->get_color('color.diff.old', 'red'), + ) : (); +my ($diff_new_color) = + $diff_use_color ? 
( + $repo->get_color('color.diff.new', 'green'), + ) : (); + +my $normal_color = $repo->get_color("", "reset"); + +my $use_readkey = 0; +sub ReadMode; +sub ReadKey; +if ($repo->config_bool("interactive.singlekey")) { + eval { + require Term::ReadKey; + Term::ReadKey->import; + $use_readkey = 1; + }; +} + +sub colored { + my $color = shift; + my $string = join("", @_); + + if (defined $color) { + # Put a color code at the beginning of each line, a reset at the end + # color after newlines that are not at the end of the string + $string =~ s/(\n+)(.)/$1$color$2/g; + # reset before newlines + $string =~ s/(\n+)/$normal_color$1/g; + # codes at beginning and end (if necessary): + $string =~ s/^/$color/; + $string =~ s/$/$normal_color/ unless $string =~ /\n$/; + } + return $string; +} + +# command line options +my $patch_mode; +my $patch_mode_revision; + +sub apply_patch; +sub apply_patch_for_checkout_commit; +sub apply_patch_for_stash; + +my %patch_modes = ( + 'stage' => { + DIFF => 'diff-files -p', + APPLY => sub { apply_patch 'apply --cached', @_; }, + APPLY_CHECK => 'apply --cached', + VERB => 'Stage', + TARGET => '', + PARTICIPLE => 'staging', + FILTER => 'file-only', + IS_REVERSE => 0, + }, + 'stash' => { + DIFF => 'diff-index -p HEAD', + APPLY => sub { apply_patch 'apply --cached', @_; }, + APPLY_CHECK => 'apply --cached', + VERB => 'Stash', + TARGET => '', + PARTICIPLE => 'stashing', + FILTER => undef, + IS_REVERSE => 0, + }, + 'reset_head' => { + DIFF => 'diff-index -p --cached', + APPLY => sub { apply_patch 'apply -R --cached', @_; }, + APPLY_CHECK => 'apply -R --cached', + VERB => 'Unstage', + TARGET => '', + PARTICIPLE => 'unstaging', + FILTER => 'index-only', + IS_REVERSE => 1, + }, + 'reset_nothead' => { + DIFF => 'diff-index -R -p --cached', + APPLY => sub { apply_patch 'apply --cached', @_; }, + APPLY_CHECK => 'apply --cached', + VERB => 'Apply', + TARGET => ' to index', + PARTICIPLE => 'applying', + FILTER => 'index-only', + IS_REVERSE => 0, + }, + 'checkout_index' => { + DIFF => 'diff-files -p', + APPLY => sub { apply_patch 'apply -R', @_; }, + APPLY_CHECK => 'apply -R', + VERB => 'Discard', + TARGET => ' from worktree', + PARTICIPLE => 'discarding', + FILTER => 'file-only', + IS_REVERSE => 1, + }, + 'checkout_head' => { + DIFF => 'diff-index -p', + APPLY => sub { apply_patch_for_checkout_commit '-R', @_ }, + APPLY_CHECK => 'apply -R', + VERB => 'Discard', + TARGET => ' from index and worktree', + PARTICIPLE => 'discarding', + FILTER => undef, + IS_REVERSE => 1, + }, + 'checkout_nothead' => { + DIFF => 'diff-index -R -p', + APPLY => sub { apply_patch_for_checkout_commit '', @_ }, + APPLY_CHECK => 'apply', + VERB => 'Apply', + TARGET => ' to index and worktree', + PARTICIPLE => 'applying', + FILTER => undef, + IS_REVERSE => 0, + }, +); + +my %patch_mode_flavour = %{$patch_modes{stage}}; + +sub run_cmd_pipe { + if ($^O eq 'MSWin32' || $^O eq 'msys') { + my @invalid = grep {m/[":*]/} @_; + die "$^O does not support: @invalid\n" if @invalid; + my @args = map { m/ /o ? 
"\"$_\"": $_ } @_; + return qx{@args}; + } else { + my $fh = undef; + open($fh, '-|', @_) or die; + return <$fh>; + } +} + +my ($GIT_DIR) = run_cmd_pipe(qw(git rev-parse --git-dir)); + +if (!defined $GIT_DIR) { + exit(1); # rev-parse would have already said "not a git repo" +} +chomp($GIT_DIR); + +my %cquote_map = ( + "b" => chr(8), + "t" => chr(9), + "n" => chr(10), + "v" => chr(11), + "f" => chr(12), + "r" => chr(13), + "\\" => "\\", + "\042" => "\042", +); + +sub unquote_path { + local ($_) = @_; + my ($retval, $remainder); + if (!/^\042(.*)\042$/) { + return $_; + } + ($_, $retval) = ($1, ""); + while (/^([^\\]*)\\(.*)$/) { + $remainder = $2; + $retval .= $1; + for ($remainder) { + if (/^([0-3][0-7][0-7])(.*)$/) { + $retval .= chr(oct($1)); + $_ = $2; + last; + } + if (/^([\\\042btnvfr])(.*)$/) { + $retval .= $cquote_map{$1}; + $_ = $2; + last; + } + # This is malformed -- just return it as-is for now. + return $_[0]; + } + $_ = $remainder; + } + $retval .= $_; + return $retval; +} + +sub refresh { + my $fh; + open $fh, 'git update-index --refresh |' + or die; + while (<$fh>) { + ;# ignore 'needs update' + } + close $fh; +} + +sub list_untracked { + map { + chomp $_; + unquote_path($_); + } + run_cmd_pipe(qw(git ls-files --others --exclude-standard --), @ARGV); +} + +my $status_fmt = '%12s %12s %s'; +my $status_head = sprintf($status_fmt, 'staged', 'unstaged', 'path'); + +{ + my $initial; + sub is_initial_commit { + $initial = system('git rev-parse HEAD -- >/dev/null 2>&1') != 0 + unless defined $initial; + return $initial; + } +} + +sub get_empty_tree { + return '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; +} + +# Returns list of hashes, contents of each of which are: +# VALUE: pathname +# BINARY: is a binary path +# INDEX: is index different from HEAD? +# FILE: is file different from index? +# INDEX_ADDDEL: is it add/delete between HEAD and index? +# FILE_ADDDEL: is it add/delete between index and file? 
+ +sub list_modified { + my ($only) = @_; + my (%data, @return); + my ($add, $del, $adddel, $file); + my @tracked = (); + + if (@ARGV) { + @tracked = map { + chomp $_; + unquote_path($_); + } run_cmd_pipe(qw(git ls-files --), @ARGV); + return if (!@tracked); + } + + my $reference; + if (defined $patch_mode_revision and $patch_mode_revision ne 'HEAD') { + $reference = $patch_mode_revision; + } elsif (is_initial_commit()) { + $reference = get_empty_tree(); + } else { + $reference = 'HEAD'; + } + for (run_cmd_pipe(qw(git diff-index --cached + --numstat --summary), $reference, + '--', @tracked)) { + if (($add, $del, $file) = + /^([-\d]+) ([-\d]+) (.*)/) { + my ($change, $bin); + $file = unquote_path($file); + if ($add eq '-' && $del eq '-') { + $change = 'binary'; + $bin = 1; + } + else { + $change = "+$add/-$del"; + } + $data{$file} = { + INDEX => $change, + BINARY => $bin, + FILE => 'nothing', + } + } + elsif (($adddel, $file) = + /^ (create|delete) mode [0-7]+ (.*)$/) { + $file = unquote_path($file); + $data{$file}{INDEX_ADDDEL} = $adddel; + } + } + + for (run_cmd_pipe(qw(git diff-files --numstat --summary --), @tracked)) { + if (($add, $del, $file) = + /^([-\d]+) ([-\d]+) (.*)/) { + $file = unquote_path($file); + if (!exists $data{$file}) { + $data{$file} = +{ + INDEX => 'unchanged', + BINARY => 0, + }; + } + my ($change, $bin); + if ($add eq '-' && $del eq '-') { + $change = 'binary'; + $bin = 1; + } + else { + $change = "+$add/-$del"; + } + $data{$file}{FILE} = $change; + if ($bin) { + $data{$file}{BINARY} = 1; + } + } + elsif (($adddel, $file) = + /^ (create|delete) mode [0-7]+ (.*)$/) { + $file = unquote_path($file); + $data{$file}{FILE_ADDDEL} = $adddel; + } + } + + for (sort keys %data) { + my $it = $data{$_}; + + if ($only) { + if ($only eq 'index-only') { + next if ($it->{INDEX} eq 'unchanged'); + } + if ($only eq 'file-only') { + next if ($it->{FILE} eq 'nothing'); + } + } + push @return, +{ + VALUE => $_, + %$it, + }; + } + return @return; +} + +sub find_unique { + my ($string, @stuff) = @_; + my $found = undef; + for (my $i = 0; $i < @stuff; $i++) { + my $it = $stuff[$i]; + my $hit = undef; + if (ref $it) { + if ((ref $it) eq 'ARRAY') { + $it = $it->[0]; + } + else { + $it = $it->{VALUE}; + } + } + eval { + if ($it =~ /^$string/) { + $hit = 1; + }; + }; + if (defined $hit && defined $found) { + return undef; + } + if ($hit) { + $found = $i + 1; + } + } + return $found; +} + +# inserts string into trie and updates count for each character +sub update_trie { + my ($trie, $string) = @_; + foreach (split //, $string) { + $trie = $trie->{$_} ||= {COUNT => 0}; + $trie->{COUNT}++; + } +} + +# returns an array of tuples (prefix, remainder) +sub find_unique_prefixes { + my @stuff = @_; + my @return = (); + + # any single prefix exceeding the soft limit is omitted + # if any prefix exceeds the hard limit all are omitted + # 0 indicates no limit + my $soft_limit = 0; + my $hard_limit = 3; + + # build a trie modelling all possible options + my %trie; + foreach my $print (@stuff) { + if ((ref $print) eq 'ARRAY') { + $print = $print->[0]; + } + elsif ((ref $print) eq 'HASH') { + $print = $print->{VALUE}; + } + update_trie(\%trie, $print); + push @return, $print; + } + + # use the trie to find the unique prefixes + for (my $i = 0; $i < @return; $i++) { + my $ret = $return[$i]; + my @letters = split //, $ret; + my %search = %trie; + my ($prefix, $remainder); + my $j; + for ($j = 0; $j < @letters; $j++) { + my $letter = $letters[$j]; + if ($search{$letter}{COUNT} == 1) { + $prefix = substr $ret, 
0, $j + 1; + $remainder = substr $ret, $j + 1; + last; + } + else { + my $prefix = substr $ret, 0, $j; + return () + if ($hard_limit && $j + 1 > $hard_limit); + } + %search = %{$search{$letter}}; + } + if (ord($letters[0]) > 127 || + ($soft_limit && $j + 1 > $soft_limit)) { + $prefix = undef; + $remainder = $ret; + } + $return[$i] = [$prefix, $remainder]; + } + return @return; +} + +# filters out prefixes which have special meaning to list_and_choose() +sub is_valid_prefix { + my $prefix = shift; + return (defined $prefix) && + !($prefix =~ /[\s,]/) && # separators + !($prefix =~ /^-/) && # deselection + !($prefix =~ /^\d+/) && # selection + ($prefix ne '*') && # "all" wildcard + ($prefix ne '?'); # prompt help +} + +# given a prefix/remainder tuple return a string with the prefix highlighted +# for now use square brackets; later might use ANSI colors (underline, bold) +sub highlight_prefix { + my $prefix = shift; + my $remainder = shift; + + if (!defined $prefix) { + return $remainder; + } + + if (!is_valid_prefix($prefix)) { + return "$prefix$remainder"; + } + + if (!$menu_use_color) { + return "[$prefix]$remainder"; + } + + return "$prompt_color$prefix$normal_color$remainder"; +} + +sub error_msg { + print STDERR colored $error_color, @_; +} + +sub list_and_choose { + my ($opts, @stuff) = @_; + my (@chosen, @return); + my $i; + my @prefixes = find_unique_prefixes(@stuff) unless $opts->{LIST_ONLY}; + + TOPLOOP: + while (1) { + my $last_lf = 0; + + if ($opts->{HEADER}) { + if (!$opts->{LIST_FLAT}) { + print " "; + } + print colored $header_color, "$opts->{HEADER}\n"; + } + for ($i = 0; $i < @stuff; $i++) { + my $chosen = $chosen[$i] ? '*' : ' '; + my $print = $stuff[$i]; + my $ref = ref $print; + my $highlighted = highlight_prefix(@{$prefixes[$i]}) + if @prefixes; + if ($ref eq 'ARRAY') { + $print = $highlighted || $print->[0]; + } + elsif ($ref eq 'HASH') { + my $value = $highlighted || $print->{VALUE}; + $print = sprintf($status_fmt, + $print->{INDEX}, + $print->{FILE}, + $value); + } + else { + $print = $highlighted || $print; + } + printf("%s%2d: %s", $chosen, $i+1, $print); + if (($opts->{LIST_FLAT}) && + (($i + 1) % ($opts->{LIST_FLAT}))) { + print "\t"; + $last_lf = 0; + } + else { + print "\n"; + $last_lf = 1; + } + } + if (!$last_lf) { + print "\n"; + } + + return if ($opts->{LIST_ONLY}); + + print colored $prompt_color, $opts->{PROMPT}; + if ($opts->{SINGLETON}) { + print "> "; + } + else { + print ">> "; + } + my $line = ; + if (!$line) { + print "\n"; + $opts->{ON_EOF}->() if $opts->{ON_EOF}; + last; + } + chomp $line; + last if $line eq ''; + if ($line eq '?') { + $opts->{SINGLETON} ? + singleton_prompt_help_cmd() : + prompt_help_cmd(); + next TOPLOOP; + } + for my $choice (split(/[\s,]+/, $line)) { + my $choose = 1; + my ($bottom, $top); + + # Input that begins with '-'; unchoose + if ($choice =~ s/^-//) { + $choose = 0; + } + # A range can be specified like 5-7 or 5-. + if ($choice =~ /^(\d+)-(\d*)$/) { + ($bottom, $top) = ($1, length($2) ? 
$2 : 1 + @stuff); + } + elsif ($choice =~ /^\d+$/) { + $bottom = $top = $choice; + } + elsif ($choice eq '*') { + $bottom = 1; + $top = 1 + @stuff; + } + else { + $bottom = $top = find_unique($choice, @stuff); + if (!defined $bottom) { + error_msg "Huh ($choice)?\n"; + next TOPLOOP; + } + } + if ($opts->{SINGLETON} && $bottom != $top) { + error_msg "Huh ($choice)?\n"; + next TOPLOOP; + } + for ($i = $bottom-1; $i <= $top-1; $i++) { + next if (@stuff <= $i || $i < 0); + $chosen[$i] = $choose; + } + } + last if ($opts->{IMMEDIATE} || $line eq '*'); + } + for ($i = 0; $i < @stuff; $i++) { + if ($chosen[$i]) { + push @return, $stuff[$i]; + } + } + return @return; +} + +sub singleton_prompt_help_cmd { + print colored $help_color, <<\EOF ; +Prompt help: +1 - select a numbered item +foo - select item based on unique prefix + - (empty) select nothing +EOF +} + +sub prompt_help_cmd { + print colored $help_color, <<\EOF ; +Prompt help: +1 - select a single item +3-5 - select a range of items +2-3,6-9 - select multiple ranges +foo - select item based on unique prefix +-... - unselect specified items +* - choose all items + - (empty) finish selecting +EOF +} + +sub status_cmd { + list_and_choose({ LIST_ONLY => 1, HEADER => $status_head }, + list_modified()); + print "\n"; +} + +sub say_n_paths { + my $did = shift @_; + my $cnt = scalar @_; + print "$did "; + if (1 < $cnt) { + print "$cnt paths\n"; + } + else { + print "one path\n"; + } +} + +sub update_cmd { + my @mods = list_modified('file-only'); + return if (!@mods); + + my @update = list_and_choose({ PROMPT => 'Update', + HEADER => $status_head, }, + @mods); + if (@update) { + system(qw(git update-index --add --remove --), + map { $_->{VALUE} } @update); + say_n_paths('updated', @update); + } + print "\n"; +} + +sub revert_cmd { + my @update = list_and_choose({ PROMPT => 'Revert', + HEADER => $status_head, }, + list_modified()); + if (@update) { + if (is_initial_commit()) { + system(qw(git rm --cached), + map { $_->{VALUE} } @update); + } + else { + my @lines = run_cmd_pipe(qw(git ls-tree HEAD --), + map { $_->{VALUE} } @update); + my $fh; + open $fh, '| git update-index --index-info' + or die; + for (@lines) { + print $fh $_; + } + close($fh); + for (@update) { + if ($_->{INDEX_ADDDEL} && + $_->{INDEX_ADDDEL} eq 'create') { + system(qw(git update-index --force-remove --), + $_->{VALUE}); + print "note: $_->{VALUE} is untracked now.\n"; + } + } + } + refresh(); + say_n_paths('reverted', @update); + } + print "\n"; +} + +sub add_untracked_cmd { + my @add = list_and_choose({ PROMPT => 'Add untracked' }, + list_untracked()); + if (@add) { + system(qw(git update-index --add --), @add); + say_n_paths('added', @add); + } + print "\n"; +} + +sub run_git_apply { + my $cmd = shift; + my $fh; + open $fh, '| git ' . $cmd; + print $fh @_; + return close $fh; +} + +sub parse_diff { + my ($path) = @_; + my @diff_cmd = split(" ", $patch_mode_flavour{DIFF}); + if (defined $patch_mode_revision) { + push @diff_cmd, $patch_mode_revision; + } + my @diff = run_cmd_pipe("git", @diff_cmd, "--", $path); + my @colored = (); + if ($diff_use_color) { + @colored = run_cmd_pipe("git", @diff_cmd, qw(--color --), $path); + } + my (@hunk) = { TEXT => [], DISPLAY => [], TYPE => 'header' }; + + for (my $i = 0; $i < @diff; $i++) { + if ($diff[$i] =~ /^@@ /) { + push @hunk, { TEXT => [], DISPLAY => [], + TYPE => 'hunk' }; + } + push @{$hunk[-1]{TEXT}}, $diff[$i]; + push @{$hunk[-1]{DISPLAY}}, + ($diff_use_color ? 
$colored[$i] : $diff[$i]); + } + return @hunk; +} + +sub parse_diff_header { + my $src = shift; + + my $head = { TEXT => [], DISPLAY => [], TYPE => 'header' }; + my $mode = { TEXT => [], DISPLAY => [], TYPE => 'mode' }; + my $deletion = { TEXT => [], DISPLAY => [], TYPE => 'deletion' }; + + for (my $i = 0; $i < @{$src->{TEXT}}; $i++) { + my $dest = + $src->{TEXT}->[$i] =~ /^(old|new) mode (\d+)$/ ? $mode : + $src->{TEXT}->[$i] =~ /^deleted file/ ? $deletion : + $head; + push @{$dest->{TEXT}}, $src->{TEXT}->[$i]; + push @{$dest->{DISPLAY}}, $src->{DISPLAY}->[$i]; + } + return ($head, $mode, $deletion); +} + +sub hunk_splittable { + my ($text) = @_; + + my @s = split_hunk($text); + return (1 < @s); +} + +sub parse_hunk_header { + my ($line) = @_; + my ($o_ofs, $o_cnt, $n_ofs, $n_cnt) = + $line =~ /^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/; + $o_cnt = 1 unless defined $o_cnt; + $n_cnt = 1 unless defined $n_cnt; + return ($o_ofs, $o_cnt, $n_ofs, $n_cnt); +} + +sub split_hunk { + my ($text, $display) = @_; + my @split = (); + if (!defined $display) { + $display = $text; + } + # If there are context lines in the middle of a hunk, + # it can be split, but we would need to take care of + # overlaps later. + + my ($o_ofs, undef, $n_ofs) = parse_hunk_header($text->[0]); + my $hunk_start = 1; + + OUTER: + while (1) { + my $next_hunk_start = undef; + my $i = $hunk_start - 1; + my $this = +{ + TEXT => [], + DISPLAY => [], + TYPE => 'hunk', + OLD => $o_ofs, + NEW => $n_ofs, + OCNT => 0, + NCNT => 0, + ADDDEL => 0, + POSTCTX => 0, + USE => undef, + }; + + while (++$i < @$text) { + my $line = $text->[$i]; + my $display = $display->[$i]; + if ($line =~ /^ /) { + if ($this->{ADDDEL} && + !defined $next_hunk_start) { + # We have seen leading context and + # adds/dels and then here is another + # context, which is trailing for this + # split hunk and leading for the next + # one. + $next_hunk_start = $i; + } + push @{$this->{TEXT}}, $line; + push @{$this->{DISPLAY}}, $display; + $this->{OCNT}++; + $this->{NCNT}++; + if (defined $next_hunk_start) { + $this->{POSTCTX}++; + } + next; + } + + # add/del + if (defined $next_hunk_start) { + # We are done with the current hunk and + # this is the first real change for the + # next split one. + $hunk_start = $next_hunk_start; + $o_ofs = $this->{OLD} + $this->{OCNT}; + $n_ofs = $this->{NEW} + $this->{NCNT}; + $o_ofs -= $this->{POSTCTX}; + $n_ofs -= $this->{POSTCTX}; + push @split, $this; + redo OUTER; + } + push @{$this->{TEXT}}, $line; + push @{$this->{DISPLAY}}, $display; + $this->{ADDDEL}++; + if ($line =~ /^-/) { + $this->{OCNT}++; + } + else { + $this->{NCNT}++; + } + } + + push @split, $this; + last; + } + + for my $hunk (@split) { + $o_ofs = $hunk->{OLD}; + $n_ofs = $hunk->{NEW}; + my $o_cnt = $hunk->{OCNT}; + my $n_cnt = $hunk->{NCNT}; + + my $head = ("@@ -$o_ofs" . + (($o_cnt != 1) ? ",$o_cnt" : '') . + " +$n_ofs" . + (($n_cnt != 1) ? ",$n_cnt" : '') . 
+ " @@\n"); + my $display_head = $head; + unshift @{$hunk->{TEXT}}, $head; + if ($diff_use_color) { + $display_head = colored($fraginfo_color, $head); + } + unshift @{$hunk->{DISPLAY}}, $display_head; + } + return @split; +} + +sub find_last_o_ctx { + my ($it) = @_; + my $text = $it->{TEXT}; + my ($o_ofs, $o_cnt) = parse_hunk_header($text->[0]); + my $i = @{$text}; + my $last_o_ctx = $o_ofs + $o_cnt; + while (0 < --$i) { + my $line = $text->[$i]; + if ($line =~ /^ /) { + $last_o_ctx--; + next; + } + last; + } + return $last_o_ctx; +} + +sub merge_hunk { + my ($prev, $this) = @_; + my ($o0_ofs, $o0_cnt, $n0_ofs, $n0_cnt) = + parse_hunk_header($prev->{TEXT}[0]); + my ($o1_ofs, $o1_cnt, $n1_ofs, $n1_cnt) = + parse_hunk_header($this->{TEXT}[0]); + + my (@line, $i, $ofs, $o_cnt, $n_cnt); + $ofs = $o0_ofs; + $o_cnt = $n_cnt = 0; + for ($i = 1; $i < @{$prev->{TEXT}}; $i++) { + my $line = $prev->{TEXT}[$i]; + if ($line =~ /^\+/) { + $n_cnt++; + push @line, $line; + next; + } + + last if ($o1_ofs <= $ofs); + + $o_cnt++; + $ofs++; + if ($line =~ /^ /) { + $n_cnt++; + } + push @line, $line; + } + + for ($i = 1; $i < @{$this->{TEXT}}; $i++) { + my $line = $this->{TEXT}[$i]; + if ($line =~ /^\+/) { + $n_cnt++; + push @line, $line; + next; + } + $ofs++; + $o_cnt++; + if ($line =~ /^ /) { + $n_cnt++; + } + push @line, $line; + } + my $head = ("@@ -$o0_ofs" . + (($o_cnt != 1) ? ",$o_cnt" : '') . + " +$n0_ofs" . + (($n_cnt != 1) ? ",$n_cnt" : '') . + " @@\n"); + @{$prev->{TEXT}} = ($head, @line); +} + +sub coalesce_overlapping_hunks { + my (@in) = @_; + my @out = (); + + my ($last_o_ctx, $last_was_dirty); + + for (grep { $_->{USE} } @in) { + if ($_->{TYPE} ne 'hunk') { + push @out, $_; + next; + } + my $text = $_->{TEXT}; + my ($o_ofs) = parse_hunk_header($text->[0]); + if (defined $last_o_ctx && + $o_ofs <= $last_o_ctx && + !$_->{DIRTY} && + !$last_was_dirty) { + merge_hunk($out[-1], $_); + } + else { + push @out, $_; + } + $last_o_ctx = find_last_o_ctx($out[-1]); + $last_was_dirty = $_->{DIRTY}; + } + return @out; +} + +sub reassemble_patch { + my $head = shift; + my @patch; + + # Include everything in the header except the beginning of the diff. + push @patch, (grep { !/^[-+]{3}/ } @$head); + + # Then include any headers from the hunk lines, which must + # come before any actual hunk. + while (@_ && $_[0] !~ /^@/) { + push @patch, shift; + } + + # Then begin the diff. + push @patch, grep { /^[-+]{3}/ } @$head; + + # And then the actual hunks. + push @patch, @_; + + return @patch; +} + +sub color_diff { + return map { + colored((/^@/ ? $fraginfo_color : + /^\+/ ? $diff_new_color : + /^-/ ? $diff_old_color : + $diff_plain_color), + $_); + } @_; +} + +sub edit_hunk_manually { + my ($oldtext) = @_; + + my $hunkfile = $repo->repo_path . "/addp-hunk-edit.diff"; + my $fh; + open $fh, '>', $hunkfile + or die "failed to open hunk edit file for writing: " . $!; + print $fh "# Manual hunk edit mode -- see bottom for a quick guide\n"; + print $fh @$oldtext; + my $participle = $patch_mode_flavour{PARTICIPLE}; + my $is_reverse = $patch_mode_flavour{IS_REVERSE}; + my ($remove_plus, $remove_minus) = $is_reverse ? ('-', '+') : ('+', '-'); + print $fh <; + close $fh; + unlink $hunkfile; + + # Abort if nothing remains + if (!grep { /\S/ } @newtext) { + return undef; + } + + # Reinsert the first hunk header if the user accidentally deleted it + if ($newtext[0] !~ /^@/) { + unshift @newtext, $oldtext->[0]; + } + return \@newtext; +} + +sub diff_applies { + my $fh; + return run_git_apply($patch_mode_flavour{APPLY_CHECK} . 
' --recount --check', + map { @{$_->{TEXT}} } @_); +} + +sub _restore_terminal_and_die { + ReadMode 'restore'; + print "\n"; + exit 1; +} + +sub prompt_single_character { + if ($use_readkey) { + local $SIG{TERM} = \&_restore_terminal_and_die; + local $SIG{INT} = \&_restore_terminal_and_die; + ReadMode 'cbreak'; + my $key = ReadKey 0; + ReadMode 'restore'; + print "$key" if defined $key; + print "\n"; + return $key; + } else { + return ; + } +} + +sub prompt_yesno { + my ($prompt) = @_; + while (1) { + print colored $prompt_color, $prompt; + my $line = prompt_single_character; + return 0 if $line =~ /^n/i; + return 1 if $line =~ /^y/i; + } +} + +sub edit_hunk_loop { + my ($head, $hunk, $ix) = @_; + my $text = $hunk->[$ix]->{TEXT}; + + while (1) { + $text = edit_hunk_manually($text); + if (!defined $text) { + return undef; + } + my $newhunk = { + TEXT => $text, + TYPE => $hunk->[$ix]->{TYPE}, + USE => 1, + DIRTY => 1, + }; + if (diff_applies($head, + @{$hunk}[0..$ix-1], + $newhunk, + @{$hunk}[$ix+1..$#{$hunk}])) { + $newhunk->{DISPLAY} = [color_diff(@{$text})]; + return $newhunk; + } + else { + prompt_yesno( + 'Your edited hunk does not apply. Edit again ' + . '(saying "no" discards!) [y/n]? ' + ) or return undef; + } + } +} + +sub help_patch_cmd { + my $verb = lc $patch_mode_flavour{VERB}; + my $target = $patch_mode_flavour{TARGET}; + print colored $help_color, <{BINARY}) } @all_mods; + my @them; + + if (!@mods) { + if (@all_mods) { + print STDERR "Only binary files changed.\n"; + } else { + print STDERR "No changes.\n"; + } + return 0; + } + if ($patch_mode) { + @them = @mods; + } + else { + @them = list_and_choose({ PROMPT => 'Patch update', + HEADER => $status_head, }, + @mods); + } + for (@them) { + return 0 if patch_update_file($_->{VALUE}); + } +} + +# Generate a one line summary of a hunk. +sub summarize_hunk { + my $rhunk = shift; + my $summary = $rhunk->{TEXT}[0]; + + # Keep the line numbers, discard extra context. + $summary =~ s/@@(.*?)@@.*/$1 /s; + $summary .= " " x (20 - length $summary); + + # Add some user context. + for my $line (@{$rhunk->{TEXT}}) { + if ($line =~ m/^[+-].*\w/) { + $summary .= $line; + last; + } + } + + chomp $summary; + return substr($summary, 0, 80) . "\n"; +} + + +# Print a one-line summary of each hunk in the array ref in +# the first argument, starting wih the index in the 2nd. +sub display_hunks { + my ($hunks, $i) = @_; + my $ctr = 0; + $i ||= 0; + for (; $i < @$hunks && $ctr < 20; $i++, $ctr++) { + my $status = " "; + if (defined $hunks->[$i]{USE}) { + $status = $hunks->[$i]{USE} ? 
"+" : "-"; + } + printf "%s%2d: %s", + $status, + $i + 1, + summarize_hunk($hunks->[$i]); + } + return $i; +} + +sub patch_update_file { + my $quit = 0; + my ($ix, $num); + my $path = shift; + my ($head, @hunk) = parse_diff($path); + ($head, my $mode, my $deletion) = parse_diff_header($head); + for (@{$head->{DISPLAY}}) { + print; + } + + if (@{$mode->{TEXT}}) { + unshift @hunk, $mode; + } + if (@{$deletion->{TEXT}}) { + foreach my $hunk (@hunk) { + push @{$deletion->{TEXT}}, @{$hunk->{TEXT}}; + push @{$deletion->{DISPLAY}}, @{$hunk->{DISPLAY}}; + } + @hunk = ($deletion); + } + + $num = scalar @hunk; + $ix = 0; + + while (1) { + my ($prev, $next, $other, $undecided, $i); + $other = ''; + + if ($num <= $ix) { + $ix = 0; + } + for ($i = 0; $i < $ix; $i++) { + if (!defined $hunk[$i]{USE}) { + $prev = 1; + $other .= ',k'; + last; + } + } + if ($ix) { + $other .= ',K'; + } + for ($i = $ix + 1; $i < $num; $i++) { + if (!defined $hunk[$i]{USE}) { + $next = 1; + $other .= ',j'; + last; + } + } + if ($ix < $num - 1) { + $other .= ',J'; + } + if ($num > 1) { + $other .= ',g'; + } + for ($i = 0; $i < $num; $i++) { + if (!defined $hunk[$i]{USE}) { + $undecided = 1; + last; + } + } + last if (!$undecided); + + if ($hunk[$ix]{TYPE} eq 'hunk' && + hunk_splittable($hunk[$ix]{TEXT})) { + $other .= ',s'; + } + if ($hunk[$ix]{TYPE} eq 'hunk') { + $other .= ',e'; + } + for (@{$hunk[$ix]{DISPLAY}}) { + print; + } + print colored $prompt_color, $patch_mode_flavour{VERB}, + ($hunk[$ix]{TYPE} eq 'mode' ? ' mode change' : + $hunk[$ix]{TYPE} eq 'deletion' ? ' deletion' : + ' this hunk'), + $patch_mode_flavour{TARGET}, + " [y,n,q,a,d,/$other,?]? "; + my $line = prompt_single_character; + if ($line) { + if ($line =~ /^y/i) { + $hunk[$ix]{USE} = 1; + } + elsif ($line =~ /^n/i) { + $hunk[$ix]{USE} = 0; + } + elsif ($line =~ /^a/i) { + while ($ix < $num) { + if (!defined $hunk[$ix]{USE}) { + $hunk[$ix]{USE} = 1; + } + $ix++; + } + next; + } + elsif ($other =~ /g/ && $line =~ /^g(.*)/) { + my $response = $1; + my $no = $ix > 10 ? $ix - 10 : 0; + while ($response eq '') { + my $extra = ""; + $no = display_hunks(\@hunk, $no); + if ($no < $num) { + $extra = " ( to see more)"; + } + print "go to which hunk$extra? "; + $response = ; + if (!defined $response) { + $response = ''; + } + chomp $response; + } + if ($response !~ /^\s*\d+\s*$/) { + error_msg "Invalid number: '$response'\n"; + } elsif (0 < $response && $response <= $num) { + $ix = $response - 1; + } else { + error_msg "Sorry, only $num hunks available.\n"; + } + next; + } + elsif ($line =~ /^d/i) { + while ($ix < $num) { + if (!defined $hunk[$ix]{USE}) { + $hunk[$ix]{USE} = 0; + } + $ix++; + } + next; + } + elsif ($line =~ /^q/i) { + while ($ix < $num) { + if (!defined $hunk[$ix]{USE}) { + $hunk[$ix]{USE} = 0; + } + $ix++; + } + $quit = 1; + next; + } + elsif ($line =~ m|^/(.*)|) { + my $regex = $1; + if ($1 eq "") { + print colored $prompt_color, "search for regex? 
"; + $regex = ; + if (defined $regex) { + chomp $regex; + } + } + my $search_string; + eval { + $search_string = qr{$regex}m; + }; + if ($@) { + my ($err,$exp) = ($@, $1); + $err =~ s/ at .*git-add--interactive line \d+, line \d+.*$//; + error_msg "Malformed search regexp $exp: $err\n"; + next; + } + my $iy = $ix; + while (1) { + my $text = join ("", @{$hunk[$iy]{TEXT}}); + last if ($text =~ $search_string); + $iy++; + $iy = 0 if ($iy >= $num); + if ($ix == $iy) { + error_msg "No hunk matches the given pattern\n"; + last; + } + } + $ix = $iy; + next; + } + elsif ($line =~ /^K/) { + if ($other =~ /K/) { + $ix--; + } + else { + error_msg "No previous hunk\n"; + } + next; + } + elsif ($line =~ /^J/) { + if ($other =~ /J/) { + $ix++; + } + else { + error_msg "No next hunk\n"; + } + next; + } + elsif ($line =~ /^k/) { + if ($other =~ /k/) { + while (1) { + $ix--; + last if (!$ix || + !defined $hunk[$ix]{USE}); + } + } + else { + error_msg "No previous hunk\n"; + } + next; + } + elsif ($line =~ /^j/) { + if ($other !~ /j/) { + error_msg "No next hunk\n"; + next; + } + } + elsif ($other =~ /s/ && $line =~ /^s/) { + my @split = split_hunk($hunk[$ix]{TEXT}, $hunk[$ix]{DISPLAY}); + if (1 < @split) { + print colored $header_color, "Split into ", + scalar(@split), " hunks.\n"; + } + splice (@hunk, $ix, 1, @split); + $num = scalar @hunk; + next; + } + elsif ($other =~ /e/ && $line =~ /^e/) { + my $newhunk = edit_hunk_loop($head, \@hunk, $ix); + if (defined $newhunk) { + splice @hunk, $ix, 1, $newhunk; + } + } + else { + help_patch_cmd($other); + next; + } + # soft increment + while (1) { + $ix++; + last if ($ix >= $num || + !defined $hunk[$ix]{USE}); + } + } + } + + @hunk = coalesce_overlapping_hunks(@hunk); + + my $n_lofs = 0; + my @result = (); + for (@hunk) { + if ($_->{USE}) { + push @result, @{$_->{TEXT}}; + } + } + + if (@result) { + my $fh; + my @patch = reassemble_patch($head->{TEXT}, @result); + my $apply_routine = $patch_mode_flavour{APPLY}; + &$apply_routine(@patch); + refresh(); + } + + print "\n"; + return $quit; +} + +sub diff_cmd { + my @mods = list_modified('index-only'); + @mods = grep { !($_->{BINARY}) } @mods; + return if (!@mods); + my (@them) = list_and_choose({ PROMPT => 'Review diff', + IMMEDIATE => 1, + HEADER => $status_head, }, + @mods); + return if (!@them); + my $reference = is_initial_commit() ? get_empty_tree() : 'HEAD'; + system(qw(git diff -p --cached), $reference, '--', + map { $_->{VALUE} } @them); +} + +sub quit_cmd { + print "Bye.\n"; + exit(0); +} + +sub help_cmd { + print colored $help_color, <<\EOF ; +status - show paths with changes +update - add working tree state to the staged set of changes +revert - revert staged set of changes back to the HEAD version +patch - pick hunks and update selectively +diff - view diff between HEAD and index +add untracked - add contents of untracked files to the staged set of changes +EOF +} + +sub process_args { + return unless @ARGV; + my $arg = shift @ARGV; + if ($arg =~ /--patch(?:=(.*))?/) { + if (defined $1) { + if ($1 eq 'reset') { + $patch_mode = 'reset_head'; + $patch_mode_revision = 'HEAD'; + $arg = shift @ARGV or die "missing --"; + if ($arg ne '--') { + $patch_mode_revision = $arg; + $patch_mode = ($arg eq 'HEAD' ? + 'reset_head' : 'reset_nothead'); + $arg = shift @ARGV or die "missing --"; + } + } elsif ($1 eq 'checkout') { + $arg = shift @ARGV or die "missing --"; + if ($arg eq '--') { + $patch_mode = 'checkout_index'; + } else { + $patch_mode_revision = $arg; + $patch_mode = ($arg eq 'HEAD' ? 
+ 'checkout_head' : 'checkout_nothead'); + $arg = shift @ARGV or die "missing --"; + } + } elsif ($1 eq 'stage' or $1 eq 'stash') { + $patch_mode = $1; + $arg = shift @ARGV or die "missing --"; + } else { + die "unknown --patch mode: $1"; + } + } else { + $patch_mode = 'stage'; + $arg = shift @ARGV or die "missing --"; + } + die "invalid argument $arg, expecting --" + unless $arg eq "--"; + %patch_mode_flavour = %{$patch_modes{$patch_mode}}; + } + elsif ($arg ne "--") { + die "invalid argument $arg, expecting --"; + } +} + +sub main_loop { + my @cmd = ([ 'status', \&status_cmd, ], + [ 'update', \&update_cmd, ], + [ 'revert', \&revert_cmd, ], + [ 'add untracked', \&add_untracked_cmd, ], + [ 'patch', \&patch_update_cmd, ], + [ 'diff', \&diff_cmd, ], + [ 'quit', \&quit_cmd, ], + [ 'help', \&help_cmd, ], + ); + while (1) { + my ($it) = list_and_choose({ PROMPT => 'What now', + SINGLETON => 1, + LIST_FLAT => 4, + HEADER => '*** Commands ***', + ON_EOF => \&quit_cmd, + IMMEDIATE => 1 }, @cmd); + if ($it) { + eval { + $it->[1]->(); + }; + if ($@) { + print "$@"; + } + } + } +} + +process_args(); +refresh(); +if ($patch_mode) { + patch_update_cmd(); +} +else { + status_cmd(); + main_loop(); +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-am b/SparkleShare/Mac/git/libexec/git-core/git-am new file mode 100755 index 00000000..6cdd5910 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-am @@ -0,0 +1,841 @@ +#!/bin/sh +# +# Copyright (c) 2005, 2006 Junio C Hamano + +SUBDIRECTORY_OK=Yes +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC="\ +git am [options] [(|)...] +git am [options] (--resolved | --skip | --abort) +-- +i,interactive run interactively +b,binary* (historical option -- no-op) +3,3way allow fall back on 3way merging if needed +q,quiet be quiet +s,signoff add a Signed-off-by line to the commit message +u,utf8 recode into utf8 (default) +k,keep pass -k flag to git-mailinfo +keep-cr pass --keep-cr flag to git-mailsplit for mbox format +no-keep-cr do not pass --keep-cr flag to git-mailsplit independent of am.keepcr +c,scissors strip everything before a scissors line +whitespace= pass it through git-apply +ignore-space-change pass it through git-apply +ignore-whitespace pass it through git-apply +directory= pass it through git-apply +C= pass it through git-apply +p= pass it through git-apply +patch-format= format the patch(es) are in +reject pass it through git-apply +resolvemsg= override error message when patch failure occurs +continue continue applying patches after resolving a conflict +r,resolved synonyms for --continue +skip skip the current patch +abort restore the original branch and abort the patching operation. +committer-date-is-author-date lie about committer date +ignore-date use current timestamp for author date +rerere-autoupdate update the index with reused conflict resolution if possible +rebasing* (internal use for git-rebase)" + +. 
git-sh-setup +prefix=$(git rev-parse --show-prefix) +set_reflog_action am +require_work_tree +cd_to_toplevel + +git var GIT_COMMITTER_IDENT >/dev/null || + die "You need to set your committer info first" + +if git rev-parse --verify -q HEAD >/dev/null +then + HAS_HEAD=yes +else + HAS_HEAD= +fi + +cmdline="git am" +if test '' != "$interactive" +then + cmdline="$cmdline -i" +fi +if test '' != "$threeway" +then + cmdline="$cmdline -3" +fi + +sq () { + git rev-parse --sq-quote "$@" +} + +stop_here () { + echo "$1" >"$dotest/next" + git rev-parse --verify -q HEAD >"$dotest/abort-safety" + exit 1 +} + +safe_to_abort () { + if test -f "$dotest/dirtyindex" + then + return 1 + fi + + if ! test -s "$dotest/abort-safety" + then + return 0 + fi + + abort_safety=$(cat "$dotest/abort-safety") + if test "z$(git rev-parse --verify -q HEAD)" = "z$abort_safety" + then + return 0 + fi + echo >&2 "You seem to have moved HEAD since the last 'am' failure." + echo >&2 "Not rewinding to ORIG_HEAD" + return 1 +} + +stop_here_user_resolve () { + if [ -n "$resolvemsg" ]; then + printf '%s\n' "$resolvemsg" + stop_here $1 + fi + echo "When you have resolved this problem run \"$cmdline --resolved\"." + echo "If you would prefer to skip this patch, instead run \"$cmdline --skip\"." + echo "To restore the original branch and stop patching run \"$cmdline --abort\"." + + stop_here $1 +} + +go_next () { + rm -f "$dotest/$msgnum" "$dotest/msg" "$dotest/msg-clean" \ + "$dotest/patch" "$dotest/info" + echo "$next" >"$dotest/next" + this=$next +} + +cannot_fallback () { + echo "$1" + echo "Cannot fall back to three-way merge." + exit 1 +} + +fall_back_3way () { + O_OBJECT=`cd "$GIT_OBJECT_DIRECTORY" && pwd` + + rm -fr "$dotest"/patch-merge-* + mkdir "$dotest/patch-merge-tmp-dir" + + # First see if the patch records the index info that we can use. + git apply --build-fake-ancestor "$dotest/patch-merge-tmp-index" \ + "$dotest/patch" && + GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \ + git write-tree >"$dotest/patch-merge-base+" || + cannot_fallback "Repository lacks necessary blobs to fall back on 3-way merge." + + say Using index info to reconstruct a base tree... + if GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \ + git apply --cached <"$dotest/patch" + then + mv "$dotest/patch-merge-base+" "$dotest/patch-merge-base" + mv "$dotest/patch-merge-tmp-index" "$dotest/patch-merge-index" + else + cannot_fallback "Did you hand edit your patch? +It does not apply to blobs recorded in its index." + fi + + test -f "$dotest/patch-merge-index" && + his_tree=$(GIT_INDEX_FILE="$dotest/patch-merge-index" git write-tree) && + orig_tree=$(cat "$dotest/patch-merge-base") && + rm -fr "$dotest"/patch-merge-* || exit 1 + + say Falling back to patching base and 3-way merge... + + # This is not so wrong. Depending on which base we picked, + # orig_tree may be wildly different from ours, but his_tree + # has the same set of wildly different changes in parts the + # patch did not touch, so recursive ends up canceling them, + # saying that we reverted all those changes. + + eval GITHEAD_$his_tree='"$FIRSTLINE"' + export GITHEAD_$his_tree + if test -n "$GIT_QUIET" + then + GIT_MERGE_VERBOSITY=0 && export GIT_MERGE_VERBOSITY + fi + git-merge-recursive $orig_tree -- HEAD $his_tree || { + git rerere $allow_rerere_autoupdate + echo Failed to merge in the changes. 
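+		# Illustrative note (not part of upstream git-am): this 3-way
+		# fallback only runs when --3way was requested (git rebase sets
+		# it internally via --rebasing), e.g.:
+		#
+		#     $ git am -3 0001-some-change.patch
+		#
+		# If even the synthesized base (git apply --build-fake-ancestor
+		# plus git-merge-recursive, above) cannot be merged, "git am"
+		# stops and the usual recovery is to fix the conflicts,
+		# "git add" the files, then "git am --continue" (or --skip /
+		# --abort).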
+ exit 1 + } + unset GITHEAD_$his_tree +} + +clean_abort () { + test $# = 0 || echo >&2 "$@" + rm -fr "$dotest" + exit 1 +} + +patch_format= + +check_patch_format () { + # early return if patch_format was set from the command line + if test -n "$patch_format" + then + return 0 + fi + + # we default to mbox format if input is from stdin and for + # directories + if test $# = 0 || test "x$1" = "x-" || test -d "$1" + then + patch_format=mbox + return 0 + fi + + # otherwise, check the first few lines of the first patch to try + # to detect its format + { + read l1 + read l2 + read l3 + case "$l1" in + "From "* | "From: "*) + patch_format=mbox + ;; + '# This series applies on GIT commit'*) + patch_format=stgit-series + ;; + "# HG changeset patch") + patch_format=hg + ;; + *) + # if the second line is empty and the third is + # a From, Author or Date entry, this is very + # likely an StGIT patch + case "$l2,$l3" in + ,"From: "* | ,"Author: "* | ,"Date: "*) + patch_format=stgit + ;; + *) + ;; + esac + ;; + esac + if test -z "$patch_format" && + test -n "$l1" && + test -n "$l2" && + test -n "$l3" + then + # This begins with three non-empty lines. Is this a + # piece of e-mail a-la RFC2822? Grab all the headers, + # discarding the indented remainder of folded lines, + # and see if it looks like that they all begin with the + # header field names... + tr -d '\015' <"$1" | + sed -n -e '/^$/q' -e '/^[ ]/d' -e p | + sane_egrep -v '^[!-9;-~]+:' >/dev/null || + patch_format=mbox + fi + } < "$1" || clean_abort +} + +split_patches () { + case "$patch_format" in + mbox) + if test -n "$rebasing" || test t = "$keepcr" + then + keep_cr=--keep-cr + else + keep_cr= + fi + git mailsplit -d"$prec" -o"$dotest" -b $keep_cr -- "$@" > "$dotest/last" || + clean_abort + ;; + stgit-series) + if test $# -ne 1 + then + clean_abort "Only one StGIT patch series can be applied at once" + fi + series_dir=`dirname "$1"` + series_file="$1" + shift + { + set x + while read filename + do + set "$@" "$series_dir/$filename" + done + # remove the safety x + shift + # remove the arg coming from the first-line comment + shift + } < "$series_file" || clean_abort + # set the patch format appropriately + patch_format=stgit + # now handle the actual StGIT patches + split_patches "$@" + ;; + stgit) + this=0 + for stgit in "$@" + do + this=`expr "$this" + 1` + msgnum=`printf "%0${prec}d" $this` + # Perl version of StGIT parse_patch. The first nonemptyline + # not starting with Author, From or Date is the + # subject, and the body starts with the next nonempty + # line not starting with Author, From or Date + perl -ne 'BEGIN { $subject = 0 } + if ($subject > 1) { print ; } + elsif (/^\s+$/) { next ; } + elsif (/^Author:/) { print s/Author/From/ ; } + elsif (/^(From|Date)/) { print ; } + elsif ($subject) { + $subject = 2 ; + print "\n" ; + print ; + } else { + print "Subject: ", $_ ; + $subject = 1; + } + ' < "$stgit" > "$dotest/$msgnum" || clean_abort + done + echo "$this" > "$dotest/last" + this= + msgnum= + ;; + *) + if test -n "$parse_patch" ; then + clean_abort "Patch format $patch_format is not supported." + else + clean_abort "Patch format detection failed." 
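+			# Illustrative note (not part of upstream git-am): the
+			# auto-detection above (mbox / stgit / stgit-series / hg)
+			# can be bypassed from the command line, e.g.:
+			#
+			#     $ git am --patch-format=mbox < patches.mbox
+			#     $ git am --patch-format=stgit 0001-feature.patch
+			#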
+ fi + ;; + esac +} + +prec=4 +dotest="$GIT_DIR/rebase-apply" +sign= utf8=t keep= keepcr= skip= interactive= resolved= rebasing= abort= +resolvemsg= resume= scissors= no_inbody_headers= +git_apply_opt= +committer_date_is_author_date= +ignore_date= +allow_rerere_autoupdate= + +if test "$(git config --bool --get am.keepcr)" = true +then + keepcr=t +fi + +while test $# != 0 +do + case "$1" in + -i|--interactive) + interactive=t ;; + -b|--binary) + : ;; + -3|--3way) + threeway=t ;; + -s|--signoff) + sign=t ;; + -u|--utf8) + utf8=t ;; # this is now default + --no-utf8) + utf8= ;; + -k|--keep) + keep=t ;; + -c|--scissors) + scissors=t ;; + --no-scissors) + scissors=f ;; + -r|--resolved|--continue) + resolved=t ;; + --skip) + skip=t ;; + --abort) + abort=t ;; + --rebasing) + rebasing=t threeway=t keep=t scissors=f no_inbody_headers=t ;; + -d|--dotest) + die "-d option is no longer supported. Do not use." + ;; + --resolvemsg) + shift; resolvemsg=$1 ;; + --whitespace|--directory) + git_apply_opt="$git_apply_opt $(sq "$1=$2")"; shift ;; + -C|-p) + git_apply_opt="$git_apply_opt $(sq "$1$2")"; shift ;; + --patch-format) + shift ; patch_format="$1" ;; + --reject|--ignore-whitespace|--ignore-space-change) + git_apply_opt="$git_apply_opt $1" ;; + --committer-date-is-author-date) + committer_date_is_author_date=t ;; + --ignore-date) + ignore_date=t ;; + --rerere-autoupdate|--no-rerere-autoupdate) + allow_rerere_autoupdate="$1" ;; + -q|--quiet) + GIT_QUIET=t ;; + --keep-cr) + keepcr=t ;; + --no-keep-cr) + keepcr=f ;; + --) + shift; break ;; + *) + usage ;; + esac + shift +done + +# If the dotest directory exists, but we have finished applying all the +# patches in them, clear it out. +if test -d "$dotest" && + last=$(cat "$dotest/last") && + next=$(cat "$dotest/next") && + test $# != 0 && + test "$next" -gt "$last" +then + rm -fr "$dotest" +fi + +if test -d "$dotest" +then + case "$#,$skip$resolved$abort" in + 0,*t*) + # Explicit resume command and we do not have file, so + # we are happy. + : ;; + 0,) + # No file input but without resume parameters; catch + # user error to feed us a patch from standard input + # when there is already $dotest. This is somewhat + # unreliable -- stdin could be /dev/null for example + # and the caller did not intend to feed us a patch but + # wanted to continue unattended. + test -t 0 + ;; + *) + false + ;; + esac || + die "previous rebase directory $dotest still exists but mbox given." + resume=yes + + case "$skip,$abort" in + t,t) + die "Please make up your mind. --skip or --abort?" + ;; + t,) + git rerere clear + git read-tree --reset -u HEAD HEAD + orig_head=$(cat "$GIT_DIR/ORIG_HEAD") + git reset HEAD + git update-ref ORIG_HEAD $orig_head + ;; + ,t) + if test -f "$dotest/rebasing" + then + exec git rebase --abort + fi + git rerere clear + if safe_to_abort + then + git read-tree --reset -u HEAD ORIG_HEAD + git reset ORIG_HEAD + fi + rm -fr "$dotest" + exit ;; + esac + rm -f "$dotest/dirtyindex" +else + # Make sure we are not given --skip, --resolved, nor --abort + test "$skip$resolved$abort" = "" || + die "Resolve operation not in progress, we are not resuming." + + # Start afresh. 
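+	# Illustrative note (not part of upstream git-am): "$dotest" is
+	# $GIT_DIR/rebase-apply.  The state files written below (next, last,
+	# patch, info, final-commit, apply-opt, ...) are what --continue,
+	# --skip and --abort read back later, so an interrupted run can be
+	# inspected with something like:
+	#
+	#     $ ls .git/rebase-apply
+	#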
+ mkdir -p "$dotest" || exit + + if test -n "$prefix" && test $# != 0 + then + first=t + for arg + do + test -n "$first" && { + set x + first= + } + if is_absolute_path "$arg" + then + set "$@" "$arg" + else + set "$@" "$prefix$arg" + fi + done + shift + fi + + check_patch_format "$@" + + split_patches "$@" + + # -i can and must be given when resuming; everything + # else is kept + echo " $git_apply_opt" >"$dotest/apply-opt" + echo "$threeway" >"$dotest/threeway" + echo "$sign" >"$dotest/sign" + echo "$utf8" >"$dotest/utf8" + echo "$keep" >"$dotest/keep" + echo "$keepcr" >"$dotest/keepcr" + echo "$scissors" >"$dotest/scissors" + echo "$no_inbody_headers" >"$dotest/no_inbody_headers" + echo "$GIT_QUIET" >"$dotest/quiet" + echo 1 >"$dotest/next" + if test -n "$rebasing" + then + : >"$dotest/rebasing" + else + : >"$dotest/applying" + if test -n "$HAS_HEAD" + then + git update-ref ORIG_HEAD HEAD + else + git update-ref -d ORIG_HEAD >/dev/null 2>&1 + fi + fi +fi + +case "$resolved" in +'') + case "$HAS_HEAD" in + '') + files=$(git ls-files) ;; + ?*) + files=$(git diff-index --cached --name-only HEAD --) ;; + esac || exit + if test "$files" + then + test -n "$HAS_HEAD" && : >"$dotest/dirtyindex" + die "Dirty index: cannot apply patches (dirty: $files)" + fi +esac + +if test "$(cat "$dotest/utf8")" = t +then + utf8=-u +else + utf8=-n +fi +if test "$(cat "$dotest/keep")" = t +then + keep=-k +fi +case "$(cat "$dotest/keepcr")" in +t) + keepcr=--keep-cr ;; +f) + keepcr=--no-keep-cr ;; +esac +case "$(cat "$dotest/scissors")" in +t) + scissors=--scissors ;; +f) + scissors=--no-scissors ;; +esac +if test "$(cat "$dotest/no_inbody_headers")" = t +then + no_inbody_headers=--no-inbody-headers +else + no_inbody_headers= +fi +if test "$(cat "$dotest/quiet")" = t +then + GIT_QUIET=t +fi +if test "$(cat "$dotest/threeway")" = t +then + threeway=t +fi +git_apply_opt=$(cat "$dotest/apply-opt") +if test "$(cat "$dotest/sign")" = t +then + SIGNOFF=`git var GIT_COMMITTER_IDENT | sed -e ' + s/>.*/>/ + s/^/Signed-off-by: /' + ` +else + SIGNOFF= +fi + +last=`cat "$dotest/last"` +this=`cat "$dotest/next"` +if test "$skip" = t +then + this=`expr "$this" + 1` + resume= +fi + +while test "$this" -le "$last" +do + msgnum=`printf "%0${prec}d" $this` + next=`expr "$this" + 1` + test -f "$dotest/$msgnum" || { + resume= + go_next + continue + } + + # If we are not resuming, parse and extract the patch information + # into separate files: + # - info records the authorship and title + # - msg is the rest of commit log message + # - patch is the patch body. + # + # When we are resuming, these files are either already prepared + # by the user, or the user can tell us to do so by --resolved flag. + case "$resume" in + '') + git mailinfo $keep $no_inbody_headers $scissors $utf8 "$dotest/msg" "$dotest/patch" \ + <"$dotest/$msgnum" >"$dotest/info" || + stop_here $this + + # skip pine's internal folder data + sane_grep '^Author: Mail System Internal Data$' \ + <"$dotest"/info >/dev/null && + go_next && continue + + test -s "$dotest/patch" || { + echo "Patch is empty. Was it split wrong?" + echo "If you would prefer to skip this patch, instead run \"$cmdline --skip\"." + echo "To restore the original branch and stop patching run \"$cmdline --abort\"." 
+ stop_here $this + } + rm -f "$dotest/original-commit" "$dotest/author-script" + if test -f "$dotest/rebasing" && + commit=$(sed -e 's/^From \([0-9a-f]*\) .*/\1/' \ + -e q "$dotest/$msgnum") && + test "$(git cat-file -t "$commit")" = commit + then + git cat-file commit "$commit" | + sed -e '1,/^$/d' >"$dotest/msg-clean" + echo "$commit" > "$dotest/original-commit" + get_author_ident_from_commit "$commit" > "$dotest/author-script" + else + { + sed -n '/^Subject/ s/Subject: //p' "$dotest/info" + echo + cat "$dotest/msg" + } | + git stripspace > "$dotest/msg-clean" + fi + ;; + esac + + if test -f "$dotest/author-script" + then + eval $(cat "$dotest/author-script") + else + GIT_AUTHOR_NAME="$(sed -n '/^Author/ s/Author: //p' "$dotest/info")" + GIT_AUTHOR_EMAIL="$(sed -n '/^Email/ s/Email: //p' "$dotest/info")" + GIT_AUTHOR_DATE="$(sed -n '/^Date/ s/Date: //p' "$dotest/info")" + fi + + if test -z "$GIT_AUTHOR_EMAIL" + then + echo "Patch does not have a valid e-mail address." + stop_here $this + fi + + export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE + + case "$resume" in + '') + if test '' != "$SIGNOFF" + then + LAST_SIGNED_OFF_BY=` + sed -ne '/^Signed-off-by: /p' \ + "$dotest/msg-clean" | + sed -ne '$p' + ` + ADD_SIGNOFF=` + test "$LAST_SIGNED_OFF_BY" = "$SIGNOFF" || { + test '' = "$LAST_SIGNED_OFF_BY" && echo + echo "$SIGNOFF" + }` + else + ADD_SIGNOFF= + fi + { + if test -s "$dotest/msg-clean" + then + cat "$dotest/msg-clean" + fi + if test '' != "$ADD_SIGNOFF" + then + echo "$ADD_SIGNOFF" + fi + } >"$dotest/final-commit" + ;; + *) + case "$resolved$interactive" in + tt) + # This is used only for interactive view option. + git diff-index -p --cached HEAD -- >"$dotest/patch" + ;; + esac + esac + + resume= + if test "$interactive" = t + then + test -t 0 || + die "cannot be interactive without stdin connected to a terminal." + action=again + while test "$action" = again + do + echo "Commit Body is:" + echo "--------------------------" + cat "$dotest/final-commit" + echo "--------------------------" + printf "Apply? [y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all " + read reply + case "$reply" in + [yY]*) action=yes ;; + [aA]*) action=yes interactive= ;; + [nN]*) action=skip ;; + [eE]*) git_editor "$dotest/final-commit" + action=again ;; + [vV]*) action=again + git_pager "$dotest/patch" ;; + *) action=again ;; + esac + done + else + action=yes + fi + + if test -f "$dotest/final-commit" + then + FIRSTLINE=$(sed 1q "$dotest/final-commit") + else + FIRSTLINE="" + fi + + if test $action = skip + then + go_next + continue + fi + + if test -x "$GIT_DIR"/hooks/applypatch-msg + then + "$GIT_DIR"/hooks/applypatch-msg "$dotest/final-commit" || + stop_here $this + fi + + say "Applying: $FIRSTLINE" + + case "$resolved" in + '') + # When we are allowed to fall back to 3-way later, don't give + # false errors during the initial attempt. + squelch= + if test "$threeway" = t + then + squelch='>/dev/null 2>&1 ' + fi + eval "git apply $squelch$git_apply_opt"' --index "$dotest/patch"' + apply_status=$? + ;; + t) + # Resolved means the user did all the hard work, and + # we do not have to do any patch application. Just + # trust what the user has in the index file and the + # working tree. + resolved= + git diff-index --quiet --cached HEAD -- && { + echo "No changes - did you forget to use 'git add'?" + echo "If there is nothing left to stage, chances are that something else" + echo "already introduced the same changes; you might want to skip this patch." 
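+		# Illustrative note (not part of upstream git-am): this branch
+		# is reached after "git am --resolved" (a synonym for
+		# --continue).  The expected manual steps before re-running it
+		# are roughly:
+		#
+		#     $ git add <files fixed up by hand>
+		#     $ git am --continue
+		#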
+ stop_here_user_resolve $this + } + unmerged=$(git ls-files -u) + if test -n "$unmerged" + then + echo "You still have unmerged paths in your index" + echo "did you forget to use 'git add'?" + stop_here_user_resolve $this + fi + apply_status=0 + git rerere + ;; + esac + + if test $apply_status != 0 && test "$threeway" = t + then + if (fall_back_3way) + then + # Applying the patch to an earlier tree and merging the + # result may have produced the same tree as ours. + git diff-index --quiet --cached HEAD -- && { + say No changes -- Patch already applied. + go_next + continue + } + # clear apply_status -- we have successfully merged. + apply_status=0 + fi + fi + if test $apply_status != 0 + then + printf 'Patch failed at %s %s\n' "$msgnum" "$FIRSTLINE" + stop_here_user_resolve $this + fi + + if test -x "$GIT_DIR"/hooks/pre-applypatch + then + "$GIT_DIR"/hooks/pre-applypatch || stop_here $this + fi + + tree=$(git write-tree) && + commit=$( + if test -n "$ignore_date" + then + GIT_AUTHOR_DATE= + fi + parent=$(git rev-parse --verify -q HEAD) || + say >&2 "applying to an empty history" + + if test -n "$committer_date_is_author_date" + then + GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" + export GIT_COMMITTER_DATE + fi && + git commit-tree $tree ${parent:+-p} $parent <"$dotest/final-commit" + ) && + git update-ref -m "$GIT_REFLOG_ACTION: $FIRSTLINE" HEAD $commit $parent || + stop_here $this + + if test -f "$dotest/original-commit"; then + echo "$(cat "$dotest/original-commit") $commit" >> "$dotest/rewritten" + fi + + if test -x "$GIT_DIR"/hooks/post-applypatch + then + "$GIT_DIR"/hooks/post-applypatch + fi + + go_next +done + +if test -s "$dotest"/rewritten; then + git notes copy --for-rewrite=rebase < "$dotest"/rewritten + if test -x "$GIT_DIR"/hooks/post-rewrite; then + "$GIT_DIR"/hooks/post-rewrite rebase < "$dotest"/rewritten + fi +fi + +rm -fr "$dotest" +git gc --auto diff --git a/SparkleShare/Mac/git/libexec/git-core/git-annotate b/SparkleShare/Mac/git/libexec/git-core/git-annotate new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-annotate @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-apply b/SparkleShare/Mac/git/libexec/git-core/git-apply new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-apply @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-archimport b/SparkleShare/Mac/git/libexec/git-core/git-archimport new file mode 100755 index 00000000..a0b4dd5f --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-archimport @@ -0,0 +1,1135 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); +# +# This tool is copyright (c) 2005, Martin Langhoff. +# It is released under the Gnu Public License, version 2. +# +# The basic idea is to walk the output of tla abrowse, +# fetch the changesets and apply them. +# + +=head1 Invocation + + git archimport [ -h ] [ -v ] [ -o ] [ -a ] [ -f ] [ -T ] + [ -D depth] [ -t tempdir ] / [ / ] + +Imports a project from one or more Arch repositories. It will follow branches +and repositories within the namespaces defined by the +parameters supplied. If it cannot find the remote branch a merge comes from +it will just import it as a regular commit. If it can find it, it will mark it +as a merge whenever possible. + +See man (1) git-archimport for more details. 
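+
+(Editorial example, not part of the upstream manpage: a first import of a
+single Arch branch, with merge detection two levels deep and verbose output,
+might look roughly like
+
+    git archimport -v -D 2 jane@example.com--2005/project--devel--1.0
+
+where the archive/category--branch--version name is whatever "tla abrowse"
+reports for the repository; the archive name shown here is hypothetical.)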
+ +=head1 TODO + + - create tag objects instead of ref tags + - audit shell-escaping of filenames + - hide our private tags somewhere smarter + - find a way to make "cat *patches | patch" safe even when patchfiles are missing newlines + - sort and apply patches by graphing ancestry relations instead of just + relying in dates supplied in the changeset itself. + tla ancestry-graph -m could be helpful here... + +=head1 Devel tricks + +Add print in front of the shell commands invoked via backticks. + +=head1 Devel Notes + +There are several places where Arch and git terminology are intermixed +and potentially confused. + +The notion of a "branch" in git is approximately equivalent to +a "archive/category--branch--version" in Arch. Also, it should be noted +that the "--branch" portion of "archive/category--branch--version" is really +optional in Arch although not many people (nor tools!) seem to know this. +This means that "archive/category--version" is also a valid "branch" +in git terms. + +We always refer to Arch names by their fully qualified variant (which +means the "archive" name is prefixed. + +For people unfamiliar with Arch, an "archive" is the term for "repository", +and can contain multiple, unrelated branches. + +=cut + +use 5.008; +use strict; +use warnings; +use Getopt::Std; +use File::Temp qw(tempdir); +use File::Path qw(mkpath rmtree); +use File::Basename qw(basename dirname); +use Data::Dumper qw/ Dumper /; +use IPC::Open2; + +$SIG{'PIPE'}="IGNORE"; +$ENV{'TZ'}="UTC"; + +my $git_dir = $ENV{"GIT_DIR"} || ".git"; +$ENV{"GIT_DIR"} = $git_dir; +my $ptag_dir = "$git_dir/archimport/tags"; + +our($opt_h,$opt_f,$opt_v,$opt_T,$opt_t,$opt_D,$opt_a,$opt_o); + +sub usage() { + print STDERR <= 1 or usage(); +# $arch_branches: +# values associated with keys: +# =1 - Arch version / git 'branch' detected via abrowse on a limit +# >1 - Arch version / git 'branch' of an auxiliary branch we've merged +my %arch_branches = map { my $branch = $_; $branch =~ s/:[^:]*$//; $branch => 1 } @ARGV; + +# $branch_name_map: +# maps arch branches to git branch names +my %branch_name_map = map { m/^(.*):([^:]*)$/; $1 => $2 } grep { m/:/ } @ARGV; + +$ENV{'TMPDIR'} = $opt_t if $opt_t; # $ENV{TMPDIR} will affect tempdir() calls: +my $tmp = tempdir('git-archimport-XXXXXX', TMPDIR => 1, CLEANUP => 1); +$opt_v && print "+ Using $tmp as temporary directory\n"; + +unless (-d $git_dir) { # initial import needs empty directory + opendir DIR, '.' 
or die "Unable to open current directory: $!\n"; + while (my $entry = readdir DIR) { + $entry =~ /^\.\.?$/ or + die "Initial import needs an empty current working directory.\n" + } + closedir DIR +} + +my $default_archive; # default Arch archive +my %reachable = (); # Arch repositories we can access +my %unreachable = (); # Arch repositories we can't access :< +my @psets = (); # the collection +my %psets = (); # the collection, by name +my %stats = ( # Track which strategy we used to import: + get_tag => 0, replay => 0, get_new => 0, get_delta => 0, + simple_changeset => 0, import_or_tag => 0 +); + +my %rptags = (); # my reverse private tags + # to map a SHA1 to a commitid +my $TLA = $ENV{'ARCH_CLIENT'} || 'tla'; + +sub do_abrowse { + my $stage = shift; + while (my ($limit, $level) = each %arch_branches) { + next unless $level == $stage; + + open ABROWSE, "$TLA abrowse -fkD --merges $limit |" + or die "Problems with tla abrowse: $!"; + + my %ps = (); # the current one + my $lastseen = ''; + + while () { + chomp; + + # first record padded w 8 spaces + if (s/^\s{8}\b//) { + my ($id, $type) = split(m/\s+/, $_, 2); + + my %last_ps; + # store the record we just captured + if (%ps && !exists $psets{ $ps{id} }) { + %last_ps = %ps; # break references + push (@psets, \%last_ps); + $psets{ $last_ps{id} } = \%last_ps; + } + + my $branch = extract_versionname($id); + %ps = ( id => $id, branch => $branch ); + if (%last_ps && ($last_ps{branch} eq $branch)) { + $ps{parent_id} = $last_ps{id}; + } + + $arch_branches{$branch} = 1; + $lastseen = 'id'; + + # deal with types (should work with baz or tla): + if ($type =~ m/\(.*changeset\)/) { + $ps{type} = 's'; + } elsif ($type =~ /\(.*import\)/) { + $ps{type} = 'i'; + } elsif ($type =~ m/\(tag.*?(\S+\@\S+).*?\)/) { + $ps{type} = 't'; + # read which revision we've tagged when we parse the log + $ps{tag} = $1; + } else { + warn "Unknown type $type"; + } + + $arch_branches{$branch} = 1; + $lastseen = 'id'; + } elsif (s/^\s{10}//) { + # 10 leading spaces or more + # indicate commit metadata + + # date + if ($lastseen eq 'id' && m/^(\d{4}-\d\d-\d\d \d\d:\d\d:\d\d)/){ + $ps{date} = $1; + $lastseen = 'date'; + } elsif ($_ eq 'merges in:') { + $ps{merges} = []; + $lastseen = 'merges'; + } elsif ($lastseen eq 'merges' && s/^\s{2}//) { + my $id = $_; + push (@{$ps{merges}}, $id); + + # aggressive branch finding: + if ($opt_D) { + my $branch = extract_versionname($id); + my $repo = extract_reponame($branch); + + if (archive_reachable($repo) && + !defined $arch_branches{$branch}) { + $arch_branches{$branch} = $stage + 1; + } + } + } else { + warn "more metadata after merges!?: $_\n" unless /^\s*$/; + } + } + } + + if (%ps && !exists $psets{ $ps{id} }) { + my %temp = %ps; # break references + if (@psets && $psets[$#psets]{branch} eq $ps{branch}) { + $temp{parent_id} = $psets[$#psets]{id}; + } + push (@psets, \%temp); + $psets{ $temp{id} } = \%temp; + } + + close ABROWSE or die "$TLA abrowse failed on $limit\n"; + } +} # end foreach $root + +do_abrowse(1); +my $depth = 2; +$opt_D ||= 0; +while ($depth <= $opt_D) { + do_abrowse($depth); + $depth++; +} + +## Order patches by time +# FIXME see if we can find a more optimal way to do this by graphing +# the ancestry data and walking it, that way we won't have to rely on +# client-supplied dates +@psets = sort {$a->{date}.$b->{id} cmp $b->{date}.$b->{id}} @psets; + +#print Dumper \@psets; + +## +## TODO cleanup irrelevant patches +## and put an initial import +## or a full tag +my $import = 0; +unless (-d $git_dir) { # initial 
import + if ($psets[0]{type} eq 'i' || $psets[0]{type} eq 't') { + print "Starting import from $psets[0]{id}\n"; + `git-init`; + die $! if $?; + $import = 1; + } else { + die "Need to start from an import or a tag -- cannot use $psets[0]{id}"; + } +} else { # progressing an import + # load the rptags + opendir(DIR, $ptag_dir) + || die "can't opendir: $!"; + while (my $file = readdir(DIR)) { + # skip non-interesting-files + next unless -f "$ptag_dir/$file"; + + # convert first '--' to '/' from old git-archimport to use + # as an archivename/c--b--v private tag + if ($file !~ m!,!) { + my $oldfile = $file; + $file =~ s!--!,!; + print STDERR "converting old tag $oldfile to $file\n"; + rename("$ptag_dir/$oldfile", "$ptag_dir/$file") or die $!; + } + my $sha = ptag($file); + chomp $sha; + $rptags{$sha} = $file; + } + closedir DIR; +} + +# process patchsets +# extract the Arch repository name (Arch "archive" in Arch-speak) +sub extract_reponame { + my $fq_cvbr = shift; # archivename/[[[[category]branch]version]revision] + return (split(/\//, $fq_cvbr))[0]; +} + +sub extract_versionname { + my $name = shift; + $name =~ s/--(?:patch|version(?:fix)?|base)-\d+$//; + return $name; +} + +# convert a fully-qualified revision or version to a unique dirname: +# normalperson@yhbt.net-05/mpd--uclinux--1--patch-2 +# becomes: normalperson@yhbt.net-05,mpd--uclinux--1 +# +# the git notion of a branch is closer to +# archive/category--branch--version than archive/category--branch, so we +# use this to convert to git branch names. +# Also, keep archive names but replace '/' with ',' since it won't require +# subdirectories, and is safer than swapping '--' which could confuse +# reverse-mapping when dealing with bastard branches that +# are just archive/category--version (no --branch) +sub tree_dirname { + my $revision = shift; + my $name = extract_versionname($revision); + $name =~ s#/#,#; + return $name; +} + +# old versions of git-archimport just use the part: +sub old_style_branchname { + my $id = shift; + my $ret = safe_pipe_capture($TLA,'parse-package-name','-p',$id); + chomp $ret; + return $ret; +} + +*git_default_branchname = $opt_o ? *old_style_branchname : *tree_dirname; + +# retrieve default archive, since $branch_name_map keys might not include it +sub get_default_archive { + if (!defined $default_archive) { + $default_archive = safe_pipe_capture($TLA,'my-default-archive'); + chomp $default_archive; + } + return $default_archive; +} + +sub git_branchname { + my $revision = shift; + my $name = extract_versionname($revision); + + if (exists $branch_name_map{$name}) { + return $branch_name_map{$name}; + + } elsif ($name =~ m#^([^/]*)/(.*)$# + && $1 eq get_default_archive() + && exists $branch_name_map{$2}) { + # the names given in the command-line lacked the archive. + return $branch_name_map{$2}; + + } else { + return git_default_branchname($revision); + } +} + +sub process_patchset_accurate { + my $ps = shift; + + # switch to that branch if we're not already in that branch: + if (-e "$git_dir/refs/heads/$ps->{branch}") { + system('git-checkout','-f',$ps->{branch}) == 0 or die "$! $?\n"; + + # remove any old stuff that got leftover: + my $rm = safe_pipe_capture('git-ls-files','--others','-z'); + rmtree(split(/\0/,$rm)) if $rm; + } + + # Apply the import/changeset/merge into the working tree + my $dir = sync_to_ps($ps); + # read the new log entry: + my @commitlog = safe_pipe_capture($TLA,'cat-log','-d',$dir,$ps->{id}); + die "Error in cat-log: $!" 
if $?; + chomp @commitlog; + + # grab variables we want from the log, new fields get added to $ps: + # (author, date, email, summary, message body ...) + parselog($ps, \@commitlog); + + if ($ps->{id} =~ /--base-0$/ && $ps->{id} ne $psets[0]{id}) { + # this should work when importing continuations + if ($ps->{tag} && (my $branchpoint = eval { ptag($ps->{tag}) })) { + + # find where we are supposed to branch from + if (! -e "$git_dir/refs/heads/$ps->{branch}") { + system('git-branch',$ps->{branch},$branchpoint) == 0 or die "$! $?\n"; + + # We trust Arch with the fact that this is just a tag, + # and it does not affect the state of the tree, so + # we just tag and move on. If the user really wants us + # to consolidate more branches into one, don't tag because + # the tag name would be already taken. + tag($ps->{id}, $branchpoint); + ptag($ps->{id}, $branchpoint); + print " * Tagged $ps->{id} at $branchpoint\n"; + } + system('git-checkout','-f',$ps->{branch}) == 0 or die "$! $?\n"; + + # remove any old stuff that got leftover: + my $rm = safe_pipe_capture('git-ls-files','--others','-z'); + rmtree(split(/\0/,$rm)) if $rm; + return 0; + } else { + warn "Tagging from unknown id unsupported\n" if $ps->{tag}; + } + # allow multiple bases/imports here since Arch supports cherry-picks + # from unrelated trees + } + + # update the index with all the changes we got + system('git-diff-files --name-only -z | '. + 'git-update-index --remove -z --stdin') == 0 or die "$! $?\n"; + system('git-ls-files --others -z | '. + 'git-update-index --add -z --stdin') == 0 or die "$! $?\n"; + return 1; +} + +# the native changeset processing strategy. This is very fast, but +# does not handle permissions or any renames involving directories +sub process_patchset_fast { + my $ps = shift; + # + # create the branch if needed + # + if ($ps->{type} eq 'i' && !$import) { + die "Should not have more than one 'Initial import' per GIT import: $ps->{id}"; + } + + unless ($import) { # skip for import + if ( -e "$git_dir/refs/heads/$ps->{branch}") { + # we know about this branch + system('git-checkout',$ps->{branch}); + } else { + # new branch! we need to verify a few things + die "Branch on a non-tag!" unless $ps->{type} eq 't'; + my $branchpoint = ptag($ps->{tag}); + die "Tagging from unknown id unsupported: $ps->{tag}" + unless $branchpoint; + + # find where we are supposed to branch from + if (! -e "$git_dir/refs/heads/$ps->{branch}") { + system('git-branch',$ps->{branch},$branchpoint) == 0 or die "$! $?\n"; + + # We trust Arch with the fact that this is just a tag, + # and it does not affect the state of the tree, so + # we just tag and move on. If the user really wants us + # to consolidate more branches into one, don't tag because + # the tag name would be already taken. + tag($ps->{id}, $branchpoint); + ptag($ps->{id}, $branchpoint); + print " * Tagged $ps->{id} at $branchpoint\n"; + } + system('git-checkout',$ps->{branch}) == 0 or die "$! $?\n"; + return 0; + } + die $! if $?; + } + + # + # Apply the import/changeset/merge into the working tree + # + if ($ps->{type} eq 'i' || $ps->{type} eq 't') { + apply_import($ps) or die $!; + $stats{import_or_tag}++; + $import=0; + } elsif ($ps->{type} eq 's') { + apply_cset($ps); + $stats{simple_changeset}++; + } + + # + # prepare update git's index, based on what arch knows + # about the pset, resolve parents, etc + # + + my @commitlog = safe_pipe_capture($TLA,'cat-archive-log',$ps->{id}); + die "Error in cat-archive-log: $!" 
if $?; + + parselog($ps,\@commitlog); + + # imports don't give us good info + # on added files. Shame on them + if ($ps->{type} eq 'i' || $ps->{type} eq 't') { + system('git-ls-files --deleted -z | '. + 'git-update-index --remove -z --stdin') == 0 or die "$! $?\n"; + system('git-ls-files --others -z | '. + 'git-update-index --add -z --stdin') == 0 or die "$! $?\n"; + } + + # TODO: handle removed_directories and renamed_directories: + + if (my $del = $ps->{removed_files}) { + unlink @$del; + while (@$del) { + my @slice = splice(@$del, 0, 100); + system('git-update-index','--remove','--',@slice) == 0 or + die "Error in git-update-index --remove: $! $?\n"; + } + } + + if (my $ren = $ps->{renamed_files}) { # renamed + if (@$ren % 2) { + die "Odd number of entries in rename!?"; + } + + while (@$ren) { + my $from = shift @$ren; + my $to = shift @$ren; + + unless (-d dirname($to)) { + mkpath(dirname($to)); # will die on err + } + # print "moving $from $to"; + rename($from, $to) or die "Error renaming '$from' '$to': $!\n"; + system('git-update-index','--remove','--',$from) == 0 or + die "Error in git-update-index --remove: $! $?\n"; + system('git-update-index','--add','--',$to) == 0 or + die "Error in git-update-index --add: $! $?\n"; + } + } + + if (my $add = $ps->{new_files}) { + while (@$add) { + my @slice = splice(@$add, 0, 100); + system('git-update-index','--add','--',@slice) == 0 or + die "Error in git-update-index --add: $! $?\n"; + } + } + + if (my $mod = $ps->{modified_files}) { + while (@$mod) { + my @slice = splice(@$mod, 0, 100); + system('git-update-index','--',@slice) == 0 or + die "Error in git-update-index: $! $?\n"; + } + } + return 1; # we successfully applied the changeset +} + +if ($opt_f) { + print "Will import patchsets using the fast strategy\n", + "Renamed directories and permission changes will be missed\n"; + *process_patchset = *process_patchset_fast; +} else { + print "Using the default (accurate) import strategy.\n", + "Things may be a bit slow\n"; + *process_patchset = *process_patchset_accurate; +} + +foreach my $ps (@psets) { + # process patchsets + $ps->{branch} = git_branchname($ps->{id}); + + # + # ensure we have a clean state + # + if (my $dirty = `git-diff-files`) { + die "Unclean tree when about to process $ps->{id} " . + " - did we fail to commit cleanly before?\n$dirty"; + } + die $! if $?; + + # + # skip commits already in repo + # + if (ptag($ps->{id})) { + $opt_v && print " * Skipping already imported: $ps->{id}\n"; + next; + } + + print " * Starting to work on $ps->{id}\n"; + + process_patchset($ps) or next; + + # warn "errors when running git-update-index! $!"; + my $tree = `git-write-tree`; + die "cannot write tree $!" if $?; + chomp $tree; + + # + # Who's your daddy? 
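+	# (Illustration: the parent list assembled below becomes the "-p"
+	#  arguments to git-commit-tree, so a merge patchset is committed
+	#  roughly as
+	#
+	#	git commit-tree $tree -p <branch-head> -p <merged-parent>
+	#
+	#  with the extra parents coming from find_parents().)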
+ # + my @par; + if ( -e "$git_dir/refs/heads/$ps->{branch}") { + if (open HEAD, "<","$git_dir/refs/heads/$ps->{branch}") { + my $p = ; + close HEAD; + chomp $p; + push @par, '-p', $p; + } else { + if ($ps->{type} eq 's') { + warn "Could not find the right head for the branch $ps->{branch}"; + } + } + } + + if ($ps->{merges}) { + push @par, find_parents($ps); + } + + # + # Commit, tag and clean state + # + $ENV{TZ} = 'GMT'; + $ENV{GIT_AUTHOR_NAME} = $ps->{author}; + $ENV{GIT_AUTHOR_EMAIL} = $ps->{email}; + $ENV{GIT_AUTHOR_DATE} = $ps->{date}; + $ENV{GIT_COMMITTER_NAME} = $ps->{author}; + $ENV{GIT_COMMITTER_EMAIL} = $ps->{email}; + $ENV{GIT_COMMITTER_DATE} = $ps->{date}; + + my $pid = open2(*READER, *WRITER,'git-commit-tree',$tree,@par) + or die $!; + print WRITER $ps->{summary},"\n\n"; + + # only print message if it's not empty, to avoid a spurious blank line; + # also append an extra newline, so there's a blank line before the + # following "git-archimport-id:" line. + print WRITER $ps->{message},"\n\n" if ($ps->{message} ne ""); + + # make it easy to backtrack and figure out which Arch revision this was: + print WRITER 'git-archimport-id: ',$ps->{id},"\n"; + + close WRITER; + my $commitid = ; # read + chomp $commitid; + close READER; + waitpid $pid,0; # close; + + if (length $commitid != 40) { + die "Something went wrong with the commit! $! $commitid"; + } + # + # Update the branch + # + open HEAD, ">","$git_dir/refs/heads/$ps->{branch}"; + print HEAD $commitid; + close HEAD; + system('git-update-ref', 'HEAD', "$ps->{branch}"); + + # tag accordingly + ptag($ps->{id}, $commitid); # private tag + if ($opt_T || $ps->{type} eq 't' || $ps->{type} eq 'i') { + tag($ps->{id}, $commitid); + } + print " * Committed $ps->{id}\n"; + print " + tree $tree\n"; + print " + commit $commitid\n"; + $opt_v && print " + commit date is $ps->{date} \n"; + $opt_v && print " + parents: ",join(' ',@par),"\n"; +} + +if ($opt_v) { + foreach (sort keys %stats) { + print" $_: $stats{$_}\n"; + } +} +exit 0; + +# used by the accurate strategy: +sub sync_to_ps { + my $ps = shift; + my $tree_dir = $tmp.'/'.tree_dirname($ps->{id}); + + $opt_v && print "sync_to_ps($ps->{id}) method: "; + + if (-d $tree_dir) { + if ($ps->{type} eq 't') { + $opt_v && print "get (tag)\n"; + # looks like a tag-only or (worse,) a mixed tags/changeset branch, + # can't rely on replay to work correctly on these + rmtree($tree_dir); + safe_pipe_capture($TLA,'get','--no-pristine',$ps->{id},$tree_dir); + $stats{get_tag}++; + } else { + my $tree_id = arch_tree_id($tree_dir); + if ($ps->{parent_id} && ($ps->{parent_id} eq $tree_id)) { + # the common case (hopefully) + $opt_v && print "replay\n"; + safe_pipe_capture($TLA,'replay','-d',$tree_dir,$ps->{id}); + $stats{replay}++; + } else { + # getting one tree is usually faster than getting two trees + # and applying the delta ... + rmtree($tree_dir); + $opt_v && print "apply-delta\n"; + safe_pipe_capture($TLA,'get','--no-pristine', + $ps->{id},$tree_dir); + $stats{get_delta}++; + } + } + } else { + # new branch work + $opt_v && print "get (new tree)\n"; + safe_pipe_capture($TLA,'get','--no-pristine',$ps->{id},$tree_dir); + $stats{get_new}++; + } + + # added -I flag to rsync since we're going to fast! AIEEEEE!!!! + system('rsync','-aI','--delete','--exclude',$git_dir, +# '--exclude','.arch-inventory', + '--exclude','.arch-ids','--exclude','{arch}', + '--exclude','+*','--exclude',',*', + "$tree_dir/",'./') == 0 or die "Cannot rsync $tree_dir: $! 
$?"; + return $tree_dir; +} + +sub apply_import { + my $ps = shift; + my $bname = git_branchname($ps->{id}); + + mkpath($tmp); + + safe_pipe_capture($TLA,'get','-s','--no-pristine',$ps->{id},"$tmp/import"); + die "Cannot get import: $!" if $?; + system('rsync','-aI','--delete', '--exclude',$git_dir, + '--exclude','.arch-ids','--exclude','{arch}', + "$tmp/import/", './'); + die "Cannot rsync import:$!" if $?; + + rmtree("$tmp/import"); + die "Cannot remove tempdir: $!" if $?; + + + return 1; +} + +sub apply_cset { + my $ps = shift; + + mkpath($tmp); + + # get the changeset + safe_pipe_capture($TLA,'get-changeset',$ps->{id},"$tmp/changeset"); + die "Cannot get changeset: $!" if $?; + + # apply patches + if (`find $tmp/changeset/patches -type f -name '*.patch'`) { + # this can be sped up considerably by doing + # (find | xargs cat) | patch + # but that can get mucked up by patches + # with missing trailing newlines or the standard + # 'missing newline' flag in the patch - possibly + # produced with an old/buggy diff. + # slow and safe, we invoke patch once per patchfile + `find $tmp/changeset/patches -type f -name '*.patch' -print0 | grep -zv '{arch}' | xargs -iFILE -0 --no-run-if-empty patch -p1 --forward -iFILE`; + die "Problem applying patches! $!" if $?; + } + + # apply changed binary files + if (my @modified = `find $tmp/changeset/patches -type f -name '*.modified'`) { + foreach my $mod (@modified) { + chomp $mod; + my $orig = $mod; + $orig =~ s/\.modified$//; # lazy + $orig =~ s!^\Q$tmp\E/changeset/patches/!!; + #print "rsync -p '$mod' '$orig'"; + system('rsync','-p',$mod,"./$orig"); + die "Problem applying binary changes! $!" if $?; + } + } + + # bring in new files + system('rsync','-aI','--exclude',$git_dir, + '--exclude','.arch-ids', + '--exclude', '{arch}', + "$tmp/changeset/new-files-archive/",'./'); + + # deleted files are hinted from the commitlog processing + + rmtree("$tmp/changeset"); +} + + +# =for reference +# notes: *-files/-directories keys cannot have spaces, they're always +# pika-escaped. 
Everything after the first newline +# A log entry looks like: +# Revision: moodle-org--moodle--1.3.3--patch-15 +# Archive: arch-eduforge@catalyst.net.nz--2004 +# Creator: Penny Leach +# Date: Wed May 25 14:15:34 NZST 2005 +# Standard-date: 2005-05-25 02:15:34 GMT +# New-files: lang/de/.arch-ids/block_glossary_random.php.id +# lang/de/.arch-ids/block_html.php.id +# New-directories: lang/de/help/questionnaire +# lang/de/help/questionnaire/.arch-ids +# Renamed-files: .arch-ids/db_sears.sql.id db/.arch-ids/db_sears.sql.id +# db_sears.sql db/db_sears.sql +# Removed-files: lang/be/docs/.arch-ids/release.html.id +# lang/be/docs/.arch-ids/releaseold.html.id +# Modified-files: admin/cron.php admin/delete.php +# admin/editor.html backup/lib.php backup/restore.php +# New-patches: arch-eduforge@catalyst.net.nz--2004/moodle-org--moodle--1.3.3--patch-15 +# Summary: Updating to latest from MOODLE_14_STABLE (1.4.5+) +# summary can be multiline with a leading space just like the above fields +# Keywords: +# +# Updating yadda tadda tadda madda +sub parselog { + my ($ps, $log) = @_; + my $key = undef; + + # headers we want that contain filenames: + my %want_headers = ( + new_files => 1, + modified_files => 1, + renamed_files => 1, + renamed_directories => 1, + removed_files => 1, + removed_directories => 1, + ); + + chomp (@$log); + while ($_ = shift @$log) { + if (/^Continuation-of:\s*(.*)/) { + $ps->{tag} = $1; + $key = undef; + } elsif (/^Summary:\s*(.*)$/ ) { + # summary can be multiline as long as it has a leading space. + # we squeeze it onto a single line, though. + $ps->{summary} = [ $1 ]; + $key = 'summary'; + } elsif (/^Creator: (.*)\s*<([^\>]+)>/) { + $ps->{author} = $1; + $ps->{email} = $2; + $key = undef; + # any *-files or *-directories can be read here: + } elsif (/^([A-Z][a-z\-]+):\s*(.*)$/) { + my $val = $2; + $key = lc $1; + $key =~ tr/-/_/; # too lazy to quote :P + if ($want_headers{$key}) { + push @{$ps->{$key}}, split(/\s+/, $val); + } else { + $key = undef; + } + } elsif (/^$/) { + last; # remainder of @$log that didn't get shifted off is message + } elsif ($key) { + if (/^\s+(.*)$/) { + if ($key eq 'summary') { + push @{$ps->{$key}}, $1; + } else { # files/directories: + push @{$ps->{$key}}, split(/\s+/, $1); + } + } else { + $key = undef; + } + } + } + + # drop leading empty lines from the log message + while (@$log && $log->[0] eq '') { + shift @$log; + } + if (exists $ps->{summary} && @{$ps->{summary}}) { + $ps->{summary} = join(' ', @{$ps->{summary}}); + } + elsif (@$log == 0) { + $ps->{summary} = 'empty commit message'; + } else { + $ps->{summary} = $log->[0] . '...'; + } + $ps->{message} = join("\n",@$log); + + # skip Arch control files, unescape pika-escaped files + foreach my $k (keys %want_headers) { + next unless (defined $ps->{$k}); + my @tmp = (); + foreach my $t (@{$ps->{$k}}) { + next unless length ($t); + next if $t =~ m!\{arch\}/!; + next if $t =~ m!\.arch-ids/!; + # should we skip this? + next if $t =~ m!\.arch-inventory$!; + # tla cat-archive-log will give us filenames with spaces as file\(sp)name - why? + # we can assume that any filename with \ indicates some pika escaping that we want to get rid of. + if ($t =~ /\\/ ){ + $t = (safe_pipe_capture($TLA,'escape','--unescaped',$t))[0]; + } + push @tmp, $t; + } + $ps->{$k} = \@tmp; + } +} + +# write/read a tag +sub tag { + my ($tag, $commit) = @_; + + if ($opt_o) { + $tag =~ s|/|--|g; + } else { + my $patchname = $tag; + $patchname =~ s/.*--//; + $tag = git_branchname ($tag) . '--' . 
$patchname; + } + + if ($commit) { + open(C,">","$git_dir/refs/tags/$tag") + or die "Cannot create tag $tag: $!\n"; + print C "$commit\n" + or die "Cannot write tag $tag: $!\n"; + close(C) + or die "Cannot write tag $tag: $!\n"; + print " * Created tag '$tag' on '$commit'\n" if $opt_v; + } else { # read + open(C,"<","$git_dir/refs/tags/$tag") + or die "Cannot read tag $tag: $!\n"; + $commit = ; + chomp $commit; + die "Error reading tag $tag: $!\n" unless length $commit == 40; + close(C) + or die "Cannot read tag $tag: $!\n"; + return $commit; + } +} + +# write/read a private tag +# reads fail softly if the tag isn't there +sub ptag { + my ($tag, $commit) = @_; + + # don't use subdirs for tags yet, it could screw up other porcelains + $tag =~ s|/|,|g; + + my $tag_file = "$ptag_dir/$tag"; + my $tag_branch_dir = dirname($tag_file); + mkpath($tag_branch_dir) unless (-d $tag_branch_dir); + + if ($commit) { # write + open(C,">",$tag_file) + or die "Cannot create tag $tag: $!\n"; + print C "$commit\n" + or die "Cannot write tag $tag: $!\n"; + close(C) + or die "Cannot write tag $tag: $!\n"; + $rptags{$commit} = $tag + unless $tag =~ m/--base-0$/; + } else { # read + # if the tag isn't there, return 0 + unless ( -s $tag_file) { + return 0; + } + open(C,"<",$tag_file) + or die "Cannot read tag $tag: $!\n"; + $commit = ; + chomp $commit; + die "Error reading tag $tag: $!\n" unless length $commit == 40; + close(C) + or die "Cannot read tag $tag: $!\n"; + unless (defined $rptags{$commit}) { + $rptags{$commit} = $tag; + } + return $commit; + } +} + +sub find_parents { + # + # Identify what branches are merging into me + # and whether we are fully merged + # git-merge-base should tell + # me what the base of the merge should be + # + my $ps = shift; + + my %branches; # holds an arrayref per branch + # the arrayref contains a list of + # merged patches between the base + # of the merge and the current head + + my @parents; # parents found for this commit + + # simple loop to split the merges + # per branch + foreach my $merge (@{$ps->{merges}}) { + my $branch = git_branchname($merge); + unless (defined $branches{$branch} ){ + $branches{$branch} = []; + } + push @{$branches{$branch}}, $merge; + } + + # + # foreach branch find a merge base and walk it to the + # head where we are, collecting the merged patchsets that + # Arch has recorded. Keep that in @have + # Compare that with the commits on the other branch + # between merge-base and the tip of the branch (@need) + # and see if we have a series of consecutive patches + # starting from the merge base. The tip of the series + # of consecutive patches merged is our new parent for + # that branch. + # + foreach my $branch (keys %branches) { + + # check that we actually know about the branch + next unless -e "$git_dir/refs/heads/$branch"; + + my $mergebase = `git-merge-base $branch $ps->{branch}`; + if ($?) 
{ + # Don't die here, Arch supports one-way cherry-picking + # between branches with no common base (or any relationship + # at all beforehand) + warn "Cannot find merge base for $branch and $ps->{branch}"; + next; + } + chomp $mergebase; + + # now walk up to the mergepoint collecting what patches we have + my $branchtip = git_rev_parse($ps->{branch}); + my @ancestors = `git-rev-list --topo-order $branchtip ^$mergebase`; + my %have; # collected merges this branch has + foreach my $merge (@{$ps->{merges}}) { + $have{$merge} = 1; + } + my %ancestorshave; + foreach my $par (@ancestors) { + $par = commitid2pset($par); + if (defined $par->{merges}) { + foreach my $merge (@{$par->{merges}}) { + $ancestorshave{$merge}=1; + } + } + } + # print "++++ Merges in $ps->{id} are....\n"; + # my @have = sort keys %have; print Dumper(\@have); + + # merge what we have with what ancestors have + %have = (%have, %ancestorshave); + + # see what the remote branch has - these are the merges we + # will want to have in a consecutive series from the mergebase + my $otherbranchtip = git_rev_parse($branch); + my @needraw = `git-rev-list --topo-order $otherbranchtip ^$mergebase`; + my @need; + foreach my $needps (@needraw) { # get the psets + $needps = commitid2pset($needps); + # git-rev-list will also + # list commits merged in via earlier + # merges. we are only interested in commits + # from the branch we're looking at + if ($branch eq $needps->{branch}) { + push @need, $needps->{id}; + } + } + + # print "++++ Merges from $branch we want are....\n"; + # print Dumper(\@need); + + my $newparent; + while (my $needed_commit = pop @need) { + if ($have{$needed_commit}) { + $newparent = $needed_commit; + } else { + last; # break out of the while + } + } + if ($newparent) { + push @parents, $newparent; + } + + + } # end foreach branch + + # prune redundant parents + my %parents; + foreach my $p (@parents) { + $parents{$p} = 1; + } + foreach my $p (@parents) { + next unless exists $psets{$p}{merges}; + next unless ref $psets{$p}{merges}; + my @merges = @{$psets{$p}{merges}}; + foreach my $merge (@merges) { + if ($parents{$merge}) { + delete $parents{$merge}; + } + } + } + + @parents = (); + foreach (keys %parents) { + push @parents, '-p', ptag($_); + } + return @parents; +} + +sub git_rev_parse { + my $name = shift; + my $val = `git-rev-parse $name`; + die "Error: git-rev-parse $name" if $?; + chomp $val; + return $val; +} + +# resolve a SHA1 to a known patchset +sub commitid2pset { + my $commitid = shift; + chomp $commitid; + my $name = $rptags{$commitid} + || die "Cannot find reverse tag mapping for $commitid"; + $name =~ s|,|/|; + my $ps = $psets{$name} + || (print Dumper(sort keys %psets)) && die "Cannot find patchset for $name"; + return $ps; +} + + +# an alternative to `command` that allows input to be passed as an array +# to work around shell problems with weird characters in arguments +sub safe_pipe_capture { + my @output; + if (my $pid = open my $child, '-|') { + @output = (<$child>); + close $child or die join(' ',@_).": $! $?"; + } else { + exec(@_) or die "$! $?"; # exec() can fail the executable can't be found + } + return wantarray ? 
@output : join('',@output); +} + +# `tla logs -rf -d | head -n1` or `baz tree-id ` +sub arch_tree_id { + my $dir = shift; + chomp( my $ret = (safe_pipe_capture($TLA,'logs','-rf','-d',$dir))[0] ); + return $ret; +} + +sub archive_reachable { + my $archive = shift; + return 1 if $reachable{$archive}; + return 0 if $unreachable{$archive}; + + if (system "$TLA whereis-archive $archive >/dev/null") { + if ($opt_a && (system($TLA,'register-archive', + "http://mirrors.sourcecontrol.net/$archive") == 0)) { + $reachable{$archive} = 1; + return 1; + } + print STDERR "Archive is unreachable: $archive\n"; + $unreachable{$archive} = 1; + return 0; + } else { + $reachable{$archive} = 1; + return 1; + } +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-archive b/SparkleShare/Mac/git/libexec/git-core/git-archive new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-archive @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-bisect b/SparkleShare/Mac/git/libexec/git-core/git-bisect new file mode 100755 index 00000000..c21e33c8 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-bisect @@ -0,0 +1,457 @@ +#!/bin/sh + +USAGE='[help|start|bad|good|skip|next|reset|visualize|replay|log|run]' +LONG_USAGE='git bisect help + print this long help message. +git bisect start [ [...]] [--] [...] + reset bisect state and start bisection. +git bisect bad [] + mark a known-bad revision. +git bisect good [...] + mark ... known-good revisions. +git bisect skip [(|)...] + mark ... untestable revisions. +git bisect next + find next bisection to test and check it out. +git bisect reset [] + finish bisection search and go back to commit. +git bisect visualize + show bisect status in gitk. +git bisect replay + replay bisection log. +git bisect log + show bisect log. +git bisect run ... + use ... to automatically bisect. + +Please use "git help bisect" to get the full man page.' + +OPTIONS_SPEC= +. git-sh-setup +require_work_tree + +_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]' +_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40" + +bisect_autostart() { + test -s "$GIT_DIR/BISECT_START" || { + echo >&2 'You need to start by "git bisect start"' + if test -t 0 + then + echo >&2 -n 'Do you want me to do it for you [Y/n]? ' + read yesno + case "$yesno" in + [Nn]*) + exit ;; + esac + bisect_start + else + exit 1 + fi + } +} + +bisect_start() { + # + # Verify HEAD. + # + head=$(GIT_DIR="$GIT_DIR" git symbolic-ref -q HEAD) || + head=$(GIT_DIR="$GIT_DIR" git rev-parse --verify HEAD) || + die "Bad HEAD - I need a HEAD" + + # + # Check if we are bisecting. + # + start_head='' + if test -s "$GIT_DIR/BISECT_START" + then + # Reset to the rev from where we started. + start_head=$(cat "$GIT_DIR/BISECT_START") + git checkout "$start_head" -- || exit + else + # Get rev from where we start. + case "$head" in + refs/heads/*|$_x40) + # This error message should only be triggered by + # cogito usage, and cogito users should understand + # it relates to cg-seek. + [ -s "$GIT_DIR/head-name" ] && + die "won't bisect on seeked tree" + start_head="${head#refs/heads/}" + ;; + *) + die "Bad HEAD - strange symbolic ref" + ;; + esac + fi + + # + # Get rid of any old bisect state. + # + bisect_clean_state || exit + + # + # Check for one bad and then some good revisions. 
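+	# (Illustration: a start invocation usually names one bad revision, then
+	#  any number of good ones, optionally followed by path limits, e.g.
+	#
+	#	git bisect start HEAD v1.0 -- src/
+	#
+	#  the loop below records the first rev given as bad and the rest as good.)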
+ # + has_double_dash=0 + for arg; do + case "$arg" in --) has_double_dash=1; break ;; esac + done + orig_args=$(git rev-parse --sq-quote "$@") + bad_seen=0 + eval='' + while [ $# -gt 0 ]; do + arg="$1" + case "$arg" in + --) + shift + break + ;; + *) + rev=$(git rev-parse -q --verify "$arg^{commit}") || { + test $has_double_dash -eq 1 && + die "'$arg' does not appear to be a valid revision" + break + } + case $bad_seen in + 0) state='bad' ; bad_seen=1 ;; + *) state='good' ;; + esac + eval="$eval bisect_write '$state' '$rev' 'nolog'; " + shift + ;; + esac + done + + # + # Change state. + # In case of mistaken revs or checkout error, or signals received, + # "bisect_auto_next" below may exit or misbehave. + # We have to trap this to be able to clean up using + # "bisect_clean_state". + # + trap 'bisect_clean_state' 0 + trap 'exit 255' 1 2 3 15 + + # + # Write new start state. + # + echo "$start_head" >"$GIT_DIR/BISECT_START" && + git rev-parse --sq-quote "$@" >"$GIT_DIR/BISECT_NAMES" && + eval "$eval" && + echo "git bisect start$orig_args" >>"$GIT_DIR/BISECT_LOG" || exit + # + # Check if we can proceed to the next bisect state. + # + bisect_auto_next + + trap '-' 0 +} + +bisect_write() { + state="$1" + rev="$2" + nolog="$3" + case "$state" in + bad) tag="$state" ;; + good|skip) tag="$state"-"$rev" ;; + *) die "Bad bisect_write argument: $state" ;; + esac + git update-ref "refs/bisect/$tag" "$rev" || exit + echo "# $state: $(git show-branch $rev)" >>"$GIT_DIR/BISECT_LOG" + test -n "$nolog" || echo "git bisect $state $rev" >>"$GIT_DIR/BISECT_LOG" +} + +is_expected_rev() { + test -f "$GIT_DIR/BISECT_EXPECTED_REV" && + test "$1" = $(cat "$GIT_DIR/BISECT_EXPECTED_REV") +} + +check_expected_revs() { + for _rev in "$@"; do + if ! is_expected_rev "$_rev"; then + rm -f "$GIT_DIR/BISECT_ANCESTORS_OK" + rm -f "$GIT_DIR/BISECT_EXPECTED_REV" + return + fi + done +} + +bisect_skip() { + all='' + for arg in "$@" + do + case "$arg" in + *..*) + revs=$(git rev-list "$arg") || die "Bad rev input: $arg" ;; + *) + revs=$(git rev-parse --sq-quote "$arg") ;; + esac + all="$all $revs" + done + eval bisect_state 'skip' $all +} + +bisect_state() { + bisect_autostart + state=$1 + case "$#,$state" in + 0,*) + die "Please call 'bisect_state' with at least one argument." ;; + 1,bad|1,good|1,skip) + rev=$(git rev-parse --verify HEAD) || + die "Bad rev input: HEAD" + bisect_write "$state" "$rev" + check_expected_revs "$rev" ;; + 2,bad|*,good|*,skip) + shift + eval='' + for rev in "$@" + do + sha=$(git rev-parse --verify "$rev^{commit}") || + die "Bad rev input: $rev" + eval="$eval bisect_write '$state' '$sha'; " + done + eval "$eval" + check_expected_revs "$@" ;; + *,bad) + die "'git bisect bad' can take only one argument." ;; + *) + usage ;; + esac + bisect_auto_next +} + +bisect_next_check() { + missing_good= missing_bad= + git show-ref -q --verify refs/bisect/bad || missing_bad=t + test -n "$(git for-each-ref "refs/bisect/good-*")" || missing_good=t + + case "$missing_good,$missing_bad,$1" in + ,,*) + : have both good and bad - ok + ;; + *,) + # do not have both but not asked to fail - just report. + false + ;; + t,,good) + # have bad but not good. we could bisect although + # this is less optimum. + echo >&2 'Warning: bisecting only with a bad commit.' + if test -t 0 + then + printf >&2 'Are you sure [Y/n]? ' + read yesno + case "$yesno" in [Nn]*) exit 1 ;; esac + fi + : bisect without good... + ;; + *) + THEN='' + test -s "$GIT_DIR/BISECT_START" || { + echo >&2 'You need to start by "git bisect start".' 
+ THEN='then ' + } + echo >&2 'You '$THEN'need to give me at least one good' \ + 'and one bad revisions.' + echo >&2 '(You can use "git bisect bad" and' \ + '"git bisect good" for that.)' + exit 1 ;; + esac +} + +bisect_auto_next() { + bisect_next_check && bisect_next || : +} + +bisect_next() { + case "$#" in 0) ;; *) usage ;; esac + bisect_autostart + bisect_next_check good + + # Perform all bisection computation, display and checkout + git bisect--helper --next-all + res=$? + + # Check if we should exit because bisection is finished + test $res -eq 10 && exit 0 + + # Check for an error in the bisection process + test $res -ne 0 && exit $res + + return 0 +} + +bisect_visualize() { + bisect_next_check fail + + if test $# = 0 + then + case "${DISPLAY+set}${SESSIONNAME+set}${MSYSTEM+set}${SECURITYSESSIONID+set}" in + '') set git log ;; + set*) set gitk ;; + esac + else + case "$1" in + git*|tig) ;; + -*) set git log "$@" ;; + *) set git "$@" ;; + esac + fi + + eval '"$@"' --bisect -- $(cat "$GIT_DIR/BISECT_NAMES") +} + +bisect_reset() { + test -s "$GIT_DIR/BISECT_START" || { + echo "We are not bisecting." + return + } + case "$#" in + 0) branch=$(cat "$GIT_DIR/BISECT_START") ;; + 1) git rev-parse --quiet --verify "$1^{commit}" > /dev/null || + die "'$1' is not a valid commit" + branch="$1" ;; + *) + usage ;; + esac + if git checkout "$branch" -- ; then + bisect_clean_state + else + die "Could not check out original HEAD '$branch'." \ + "Try 'git bisect reset '." + fi +} + +bisect_clean_state() { + # There may be some refs packed during bisection. + git for-each-ref --format='%(refname) %(objectname)' refs/bisect/\* | + while read ref hash + do + git update-ref -d $ref $hash || exit + done + rm -f "$GIT_DIR/BISECT_EXPECTED_REV" && + rm -f "$GIT_DIR/BISECT_ANCESTORS_OK" && + rm -f "$GIT_DIR/BISECT_LOG" && + rm -f "$GIT_DIR/BISECT_NAMES" && + rm -f "$GIT_DIR/BISECT_RUN" && + # Cleanup head-name if it got left by an old version of git-bisect + rm -f "$GIT_DIR/head-name" && + + rm -f "$GIT_DIR/BISECT_START" +} + +bisect_replay () { + test "$#" -eq 1 || die "No logfile given" + test -r "$1" || die "cannot read $1 for replaying" + bisect_reset + while read git bisect command rev + do + test "$git $bisect" = "git bisect" -o "$git" = "git-bisect" || continue + if test "$git" = "git-bisect"; then + rev="$command" + command="$bisect" + fi + case "$command" in + start) + cmd="bisect_start $rev" + eval "$cmd" ;; + good|bad|skip) + bisect_write "$command" "$rev" ;; + *) + die "?? what are you talking about?" ;; + esac + done <"$1" + bisect_auto_next +} + +bisect_run () { + bisect_next_check fail + + while true + do + echo "running $@" + "$@" + res=$? + + # Check for really bad run error. + if [ $res -lt 0 -o $res -ge 128 ]; then + echo >&2 "bisect run failed:" + echo >&2 "exit code $res from '$@' is < 0 or >= 128" + exit $res + fi + + # Find current state depending on run success or failure. + # A special exit code of 125 means cannot test. + if [ $res -eq 125 ]; then + state='skip' + elif [ $res -gt 0 ]; then + state='bad' + else + state='good' + fi + + # We have to use a subshell because "bisect_state" can exit. + ( bisect_state $state > "$GIT_DIR/BISECT_RUN" ) + res=$? 
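+		# (Illustration of driving this loop from the command line; the test
+		#  command is only an example:
+		#
+		#	git bisect start HEAD v1.0
+		#	git bisect run make test
+		#
+		#  the run command should exit 0 for good, 125 for "cannot test", and
+		#  any other code below 128 for bad, as checked above.)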
+ + cat "$GIT_DIR/BISECT_RUN" + + if sane_grep "first bad commit could be any of" "$GIT_DIR/BISECT_RUN" \ + > /dev/null; then + echo >&2 "bisect run cannot continue any more" + exit $res + fi + + if [ $res -ne 0 ]; then + echo >&2 "bisect run failed:" + echo >&2 "'bisect_state $state' exited with error code $res" + exit $res + fi + + if sane_grep "is the first bad commit" "$GIT_DIR/BISECT_RUN" > /dev/null; then + echo "bisect run success" + exit 0; + fi + + done +} + +bisect_log () { + test -s "$GIT_DIR/BISECT_LOG" || die "We are not bisecting." + cat "$GIT_DIR/BISECT_LOG" +} + +case "$#" in +0) + usage ;; +*) + cmd="$1" + shift + case "$cmd" in + help) + git bisect -h ;; + start) + bisect_start "$@" ;; + bad|good) + bisect_state "$cmd" "$@" ;; + skip) + bisect_skip "$@" ;; + next) + # Not sure we want "next" at the UI level anymore. + bisect_next "$@" ;; + visualize|view) + bisect_visualize "$@" ;; + reset) + bisect_reset "$@" ;; + replay) + bisect_replay "$@" ;; + log) + bisect_log ;; + run) + bisect_run "$@" ;; + *) + usage ;; + esac +esac diff --git a/SparkleShare/Mac/git/libexec/git-core/git-bisect--helper b/SparkleShare/Mac/git/libexec/git-core/git-bisect--helper new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-bisect--helper @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-blame b/SparkleShare/Mac/git/libexec/git-core/git-blame new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-blame @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-branch b/SparkleShare/Mac/git/libexec/git-core/git-branch new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-branch @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-bundle b/SparkleShare/Mac/git/libexec/git-core/git-bundle new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-bundle @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cat-file b/SparkleShare/Mac/git/libexec/git-core/git-cat-file new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cat-file @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-check-attr b/SparkleShare/Mac/git/libexec/git-core/git-check-attr new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-check-attr @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-check-ref-format b/SparkleShare/Mac/git/libexec/git-core/git-check-ref-format new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-check-ref-format @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-checkout b/SparkleShare/Mac/git/libexec/git-core/git-checkout new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-checkout @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-checkout-index b/SparkleShare/Mac/git/libexec/git-core/git-checkout-index new file 
mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-checkout-index @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cherry b/SparkleShare/Mac/git/libexec/git-core/git-cherry new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cherry @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cherry-pick b/SparkleShare/Mac/git/libexec/git-core/git-cherry-pick new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cherry-pick @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-citool b/SparkleShare/Mac/git/libexec/git-core/git-citool new file mode 100755 index 00000000..d2499def --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-citool @@ -0,0 +1,8 @@ +#!/bin/sh +if test "z$*" = zversion || + test "z$*" = z--version +then + echo 'git-gui version 0.13.0.8.g8f85' +else + exec '/usr/local/git/share/git-gui/lib/Git Gui.app/Contents/MacOS/Wish' "$0" "$@" +fi diff --git a/SparkleShare/Mac/git/libexec/git-core/git-clean b/SparkleShare/Mac/git/libexec/git-core/git-clean new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-clean @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-clone b/SparkleShare/Mac/git/libexec/git-core/git-clone new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-clone @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-commit b/SparkleShare/Mac/git/libexec/git-core/git-commit new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-commit @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-commit-tree b/SparkleShare/Mac/git/libexec/git-core/git-commit-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-commit-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-config b/SparkleShare/Mac/git/libexec/git-core/git-config new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-config @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-count-objects b/SparkleShare/Mac/git/libexec/git-core/git-count-objects new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-count-objects @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cvsexportcommit b/SparkleShare/Mac/git/libexec/git-core/git-cvsexportcommit new file mode 100755 index 00000000..b7558c97 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cvsexportcommit @@ -0,0 +1,456 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); + +use 5.008; +use strict; +use warnings; +use Getopt::Std; +use File::Temp qw(tempdir); +use Data::Dumper; +use File::Basename qw(basename dirname); +use File::Spec; +use Git; + +our ($opt_h, $opt_P, $opt_p, $opt_v, $opt_c, $opt_f, 
$opt_a, $opt_m, $opt_d, $opt_u, $opt_w, $opt_W, $opt_k); + +getopts('uhPpvcfkam:d:w:W'); + +$opt_h && usage(); + +die "Need at least one commit identifier!" unless @ARGV; + +# Get git-config settings +my $repo = Git->repository(); +$opt_w = $repo->config('cvsexportcommit.cvsdir') unless defined $opt_w; + +if ($opt_w || $opt_W) { + # Remember where GIT_DIR is before changing to CVS checkout + unless ($ENV{GIT_DIR}) { + # No GIT_DIR set. Figure it out for ourselves + my $gd =`git-rev-parse --git-dir`; + chomp($gd); + $ENV{GIT_DIR} = $gd; + } + # Make sure GIT_DIR is absolute + $ENV{GIT_DIR} = File::Spec->rel2abs($ENV{GIT_DIR}); +} + +if ($opt_w) { + if (! -d $opt_w."/CVS" ) { + die "$opt_w is not a CVS checkout"; + } + chdir $opt_w or die "Cannot change to CVS checkout at $opt_w"; +} +unless ($ENV{GIT_DIR} && -r $ENV{GIT_DIR}){ + die "GIT_DIR is not defined or is unreadable"; +} + + +my @cvs; +if ($opt_d) { + @cvs = ('cvs', '-d', $opt_d); +} else { + @cvs = ('cvs'); +} + +# resolve target commit +my $commit; +$commit = pop @ARGV; +$commit = safe_pipe_capture('git-rev-parse', '--verify', "$commit^0"); +chomp $commit; +if ($?) { + die "The commit reference $commit did not resolve!"; +} + +# resolve what parent we want +my $parent; +if (@ARGV) { + $parent = pop @ARGV; + $parent = safe_pipe_capture('git-rev-parse', '--verify', "$parent^0"); + chomp $parent; + if ($?) { + die "The parent reference did not resolve!"; + } +} + +# find parents from the commit itself +my @commit = safe_pipe_capture('git-cat-file', 'commit', $commit); +my @parents; +my $committer; +my $author; +my $stage = 'headers'; # headers, msg +my $title; +my $msg = ''; + +foreach my $line (@commit) { + chomp $line; + if ($stage eq 'headers' && $line eq '') { + $stage = 'msg'; + next; + } + + if ($stage eq 'headers') { + if ($line =~ m/^parent (\w{40})$/) { # found a parent + push @parents, $1; + } elsif ($line =~ m/^author (.+) \d+ [-+]\d+$/) { + $author = $1; + } elsif ($line =~ m/^committer (.+) \d+ [-+]\d+$/) { + $committer = $1; + } + } else { + $msg .= $line . "\n"; + unless ($title) { + $title = $line; + } + } +} + +my $noparent = "0000000000000000000000000000000000000000"; +if ($parent) { + my $found; + # double check that it's a valid parent + foreach my $p (@parents) { + if ($p eq $parent) { + $found = 1; + last; + }; # found it + } + die "Did not find $parent in the parents for this commit!" if !$found and !$opt_P; +} else { # we don't have a parent from the cmdline... 
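+	# (Illustration: on the command line the optional parent precedes the
+	#  commit being exported, e.g. something like
+	#
+	#	git cvsexportcommit -w ~/cvs-checkout -v -c <parent-sha1> <commit-sha1>
+	#
+	#  where -w points at the CVS working copy, -v is verbose and -c asks for
+	#  an automatic CVS commit; the sha1s and path are placeholders.)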
+ if (@parents == 1) { # it's safe to get it from the commit + $parent = $parents[0]; + } elsif (@parents == 0) { # there is no parent + $parent = $noparent; + } else { # cannot choose automatically from multiple parents + die "This commit has more than one parent -- please name the parent you want to use explicitly"; + } +} + +my $go_back_to = 0; + +if ($opt_W) { + $opt_v && print "Resetting to $parent\n"; + $go_back_to = `git symbolic-ref HEAD 2> /dev/null || + git rev-parse HEAD` || die "Could not determine current branch"; + system("git checkout -q $parent^0") && die "Could not check out $parent^0"; +} + +$opt_v && print "Applying to CVS commit $commit from parent $parent\n"; + +# grab the commit message +open(MSG, ">.msg") or die "Cannot open .msg for writing"; +if ($opt_m) { + print MSG $opt_m; +} +print MSG $msg; +if ($opt_a) { + print MSG "\n\nAuthor: $author\n"; + if ($author ne $committer) { + print MSG "Committer: $committer\n"; + } +} +close MSG; + +if ($parent eq $noparent) { + `git-diff-tree --binary -p --root $commit >.cvsexportcommit.diff`;# || die "Cannot diff"; +} else { + `git-diff-tree --binary -p $parent $commit >.cvsexportcommit.diff`;# || die "Cannot diff"; +} + +## apply non-binary changes + +# In pedantic mode require all lines of context to match. In normal +# mode, be compatible with diff/patch: assume 3 lines of context and +# require at least one line match, i.e. ignore at most 2 lines of +# context, like diff/patch do by default. +my $context = $opt_p ? '' : '-C1'; + +print "Checking if patch will apply\n"; + +my @stat; +open APPLY, "GIT_DIR= git-apply $context --summary --numstat<.cvsexportcommit.diff|" || die "cannot patch"; +@stat=; +close APPLY || die "Cannot patch"; +my (@bfiles,@files,@afiles,@dfiles); +chomp @stat; +foreach (@stat) { + push (@bfiles,$1) if m/^-\t-\t(.*)$/; + push (@files, $1) if m/^-\t-\t(.*)$/; + push (@files, $1) if m/^\d+\t\d+\t(.*)$/; + push (@afiles,$1) if m/^ create mode [0-7]+ (.*)$/; + push (@dfiles,$1) if m/^ delete mode [0-7]+ (.*)$/; +} +map { s/^"(.*)"$/$1/g } @bfiles,@files; +map { s/\\([0-7]{3})/sprintf('%c',oct $1)/eg } @bfiles,@files; + +# check that the files are clean and up to date according to cvs +my $dirty; +my @dirs; +foreach my $p (@afiles) { + my $path = dirname $p; + while (!-d $path and ! grep { $_ eq $path } @dirs) { + unshift @dirs, $path; + $path = dirname $path; + } +} + +# ... check dirs, +foreach my $d (@dirs) { + if (-e $d) { + $dirty = 1; + warn "$d exists and is not a directory!\n"; + } +} + +# ... query status of all files that we have a directory for and parse output of 'cvs status' to %cvsstat. +my @canstatusfiles; +foreach my $f (@files) { + my $path = dirname $f; + next if (grep { $_ eq $path } @dirs); + push @canstatusfiles, $f; +} + +my %cvsstat; +if (@canstatusfiles) { + if ($opt_u) { + my @updated = xargs_safe_pipe_capture([@cvs, 'update'], @canstatusfiles); + print @updated; + } + # "cvs status" reorders the parameters, notably when there are multiple + # arguments with the same basename. So be precise here. + + my %added = map { $_ => 1 } @afiles; + my %todo = map { $_ => 1 } @canstatusfiles; + + while (%todo) { + my @canstatusfiles2 = (); + my %fullname = (); + foreach my $name (keys %todo) { + my $basename = basename($name); + + # CVS reports files that don't exist in the current revision as + # "no file $basename" in its "status" output, so we should + # anticipate that. Totally unknown files will have a status + # "Unknown". 
However, if they exist in the Attic, their status + # will be "Up-to-date" (this means they were added once but have + # been removed). + $basename = "no file $basename" if $added{$basename}; + + $basename =~ s/^\s+//; + $basename =~ s/\s+$//; + + if (!exists($fullname{$basename})) { + $fullname{$basename} = $name; + push (@canstatusfiles2, $name); + delete($todo{$name}); + } + } + my @cvsoutput; + @cvsoutput = xargs_safe_pipe_capture([@cvs, 'status'], @canstatusfiles2); + foreach my $l (@cvsoutput) { + chomp $l; + next unless + my ($file, $status) = $l =~ /^File:\s+(.*\S)\s+Status: (.*)$/; + + my $fullname = $fullname{$file}; + print STDERR "Huh? Status '$status' reported for unexpected file '$file'\n" + unless defined $fullname; + + # This response means the file does not exist except in + # CVS's attic, so set the status accordingly + $status = "In-attic" + if $file =~ /^no file / + && $status eq 'Up-to-date'; + + $cvsstat{$fullname{$file}} = $status + if defined $fullname{$file}; + } + } +} + +# ... Validate that new files have the correct status +foreach my $f (@afiles) { + next unless defined(my $stat = $cvsstat{$f}); + + # This means the file has never been seen before + next if $stat eq 'Unknown'; + + # This means the file has been seen before but was removed + next if $stat eq 'In-attic'; + + $dirty = 1; + warn "File $f is already known in your CVS checkout -- perhaps it has been added by another user. Or this may indicate that it exists on a different branch. If this is the case, use -f to force the merge.\n"; + warn "Status was: $cvsstat{$f}\n"; +} + +# ... validate known files. +foreach my $f (@files) { + next if grep { $_ eq $f } @afiles; + # TODO:we need to handle removed in cvs + unless (defined ($cvsstat{$f}) and $cvsstat{$f} eq "Up-to-date") { + $dirty = 1; + warn "File $f not up to date but has status '$cvsstat{$f}' in your CVS checkout!\n"; + } + + # Depending on how your GIT tree got imported from CVS you may + # have a conflict between expanded keywords in your CVS tree and + # unexpanded keywords in the patch about to be applied. + if ($opt_k) { + my $orig_file ="$f.orig"; + rename $f, $orig_file; + open(FILTER_IN, "<$orig_file") or die "Cannot open $orig_file\n"; + open(FILTER_OUT, ">$f") or die "Cannot open $f\n"; + while () + { + my $line = $_; + $line =~ s/\$([A-Z][a-z]+):[^\$]+\$/\$$1\$/g; + print FILTER_OUT $line; + } + close FILTER_IN; + close FILTER_OUT; + } +} + +if ($dirty) { + if ($opt_f) { warn "The tree is not clean -- forced merge\n"; + $dirty = 0; + } else { + die "Exiting: your CVS tree is not clean for this merge."; + } +} + +print "Applying\n"; +if ($opt_W) { + system("git checkout -q $commit^0") && die "cannot patch"; +} else { + `GIT_DIR= git-apply $context --summary --numstat --apply <.cvsexportcommit.diff` || die "cannot patch"; +} + +print "Patch applied successfully. Adding new files and directories to CVS\n"; +my $dirtypatch = 0; + +# +# We have to add the directories in order otherwise we will have +# problems when we try and add the sub-directory of a directory we +# have not added yet. +# +# Luckily this is easy to deal with by sorting the directories and +# dealing with the shortest ones first. 
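+ # For example "src" (length 3) sorts ahead of "src/lib" (length 7), so each
+ # parent directory has already been added to CVS before its children come up.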
+# +@dirs = sort { length $a <=> length $b} @dirs; + +foreach my $d (@dirs) { + if (system(@cvs,'add',$d)) { + $dirtypatch = 1; + warn "Failed to cvs add directory $d -- you may need to do it manually"; + } +} + +foreach my $f (@afiles) { + if (grep { $_ eq $f } @bfiles) { + system(@cvs, 'add','-kb',$f); + } else { + system(@cvs, 'add', $f); + } + if ($?) { + $dirtypatch = 1; + warn "Failed to cvs add $f -- you may need to do it manually"; + } +} + +foreach my $f (@dfiles) { + system(@cvs, 'rm', '-f', $f); + if ($?) { + $dirtypatch = 1; + warn "Failed to cvs rm -f $f -- you may need to do it manually"; + } +} + +print "Commit to CVS\n"; +print "Patch title (first comment line): $title\n"; +my @commitfiles = map { unless (m/\s/) { '\''.$_.'\''; } else { $_; }; } (@files); +my $cmd = join(' ', @cvs)." commit -F .msg @commitfiles"; + +if ($dirtypatch) { + print "NOTE: One or more hunks failed to apply cleanly.\n"; + print "You'll need to apply the patch in .cvsexportcommit.diff manually\n"; + print "using a patch program. After applying the patch and resolving the\n"; + print "problems you may commit using:"; + print "\n cd \"$opt_w\"" if $opt_w; + print "\n $cmd\n"; + print "\n git checkout $go_back_to\n" if $go_back_to; + print "\n"; + exit(1); +} + +if ($opt_c) { + print "Autocommit\n $cmd\n"; + print xargs_safe_pipe_capture([@cvs, 'commit', '-F', '.msg'], @files); + if ($?) { + die "Exiting: The commit did not succeed"; + } + print "Committed successfully to CVS\n"; + # clean up + unlink(".msg"); +} else { + print "Ready for you to commit, just run:\n\n $cmd\n"; +} + +# clean up +unlink(".cvsexportcommit.diff"); + +if ($opt_W) { + system("git checkout $go_back_to") && die "cannot move back to $go_back_to"; + if (!($go_back_to =~ /^[0-9a-fA-F]{40}$/)) { + system("git symbolic-ref HEAD $go_back_to") && + die "cannot move back to $go_back_to"; + } +} + +# CVS version 1.11.x and 1.12.x sleeps the wrong way to ensure the timestamp +# used by CVS and the one set by subsequence file modifications are different. +# If they are not different CVS will not detect changes. +sleep(1); + +sub usage { + print STDERR <); + close $child or die join(' ',@_).": $! $?"; + } else { + exec(@_) or die "$! $?"; # exec() can fail the executable can't be found + } + return wantarray ? @output : join('',@output); +} + +sub xargs_safe_pipe_capture { + my $MAX_ARG_LENGTH = 65536; + my $cmd = shift; + my @output; + my $output; + while(@_) { + my @args; + my $length = 0; + while(@_ && $length < $MAX_ARG_LENGTH) { + push @args, shift; + $length += length($args[$#args]); + } + if (wantarray) { + push @output, safe_pipe_capture(@$cmd, @args); + } + else { + $output .= safe_pipe_capture(@$cmd, @args); + } + } + return wantarray ? @output : $output; +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cvsimport b/SparkleShare/Mac/git/libexec/git-core/git-cvsimport new file mode 100755 index 00000000..cffa6e01 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cvsimport @@ -0,0 +1,1097 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); + +# This tool is copyright (c) 2005, Matthias Urlichs. +# It is released under the Gnu Public License, version 2. +# +# The basic idea is to aggregate CVS check-ins into related changes. +# Fortunately, "cvsps" does that for us; all we have to do is to parse +# its output. +# +# Checking out the files is done by a single long-running CVS connection +# / server process. +# +# The head revision is on branch "origin" by default. 
+# You can change that with the '-o' option. + +use 5.008; +use strict; +use warnings; +use Getopt::Long; +use File::Spec; +use File::Temp qw(tempfile tmpnam); +use File::Path qw(mkpath); +use File::Basename qw(basename dirname); +use Time::Local; +use IO::Socket; +use IO::Pipe; +use POSIX qw(strftime dup2 ENOENT); +use IPC::Open2; + +$SIG{'PIPE'}="IGNORE"; +$ENV{'TZ'}="UTC"; + +our ($opt_h,$opt_o,$opt_v,$opt_k,$opt_u,$opt_d,$opt_p,$opt_C,$opt_z,$opt_i,$opt_P, $opt_s,$opt_m,@opt_M,$opt_A,$opt_S,$opt_L, $opt_a, $opt_r, $opt_R); +my (%conv_author_name, %conv_author_email); + +sub usage(;$) { + my $msg = shift; + print(STDERR "Error: $msg\n") if $msg; + print STDERR <) { + # Expected format is this: + # exon=Andreas Ericsson + if (m/^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/) { + $user = $1; + $conv_author_name{$user} = $2; + $conv_author_email{$user} = $3; + } + # However, we also read from CVSROOT/users format + # to ease migration. + elsif (/^(\w+):(['"]?)(.+?)\2\s*$/) { + my $mapped; + ($user, $mapped) = ($1, $3); + if ($mapped =~ /^\s*(.*?)\s*<(.*)>\s*$/) { + $conv_author_name{$user} = $1; + $conv_author_email{$user} = $2; + } + elsif ($mapped =~ /^?$/) { + $conv_author_name{$user} = $user; + $conv_author_email{$user} = $1; + } + } + # NEEDSWORK: Maybe warn on unrecognized lines? + } + close ($f); +} + +sub write_author_info($) { + my ($file) = @_; + open my $f, '>', $file or + die("Failed to open $file for writing: $!"); + + foreach (keys %conv_author_name) { + print $f "$_=$conv_author_name{$_} <$conv_author_email{$_}>\n"; + } + close ($f); +} + +# convert getopts specs for use by git config +my %longmap = ( + 'A:' => 'authors-file', + 'M:' => 'merge-regex', + 'P:' => undef, + 'R' => 'track-revisions', + 'S:' => 'ignore-paths', +); + +sub read_repo_config { + # Split the string between characters, unless there is a ':' + # So "abc:de" becomes ["a", "b", "c:", "d", "e"] + my @opts = split(/ *(?!:)/, shift); + foreach my $o (@opts) { + my $key = $o; + $key =~ s/://g; + my $arg = 'git config'; + $arg .= ' --bool' if ($o !~ /:$/); + my $ckey = $key; + + if (exists $longmap{$o}) { + # An uppercase option like -R cannot be + # expressed in the configuration, as the + # variable names are downcased. + $ckey = $longmap{$o}; + next if (! defined $ckey); + $ckey =~ s/-//g; + } + chomp(my $tmp = `$arg --get cvsimport.$ckey`); + if ($tmp && !($arg =~ /--bool/ && $tmp eq 'false')) { + no strict 'refs'; + my $opt_name = "opt_" . $key; + if (!$$opt_name) { + $$opt_name = $tmp; + } + } + } +} + +my $opts = "haivmkuo:d:p:r:C:z:s:M:P:A:S:L:R"; +read_repo_config($opts); +Getopt::Long::Configure( 'no_ignore_case', 'bundling' ); + +# turn the Getopt::Std specification in a Getopt::Long one, +# with support for multiple -M options +GetOptions( map { s/:/=s/; /M/ ? "$_\@" : $_ } split( /(?!:)/, $opts ) ) + or usage(); +usage if $opt_h; + +if (@ARGV == 0) { + chomp(my $module = `git config --get cvsimport.module`); + push(@ARGV, $module) if $? == 0; +} +@ARGV <= 1 or usage("You can't specify more than one CVS module"); + +if ($opt_d) { + $ENV{"CVSROOT"} = $opt_d; +} elsif (-f 'CVS/Root') { + open my $f, '<', 'CVS/Root' or die 'Failed to open CVS/Root'; + $opt_d = <$f>; + chomp $opt_d; + close $f; + $ENV{"CVSROOT"} = $opt_d; +} elsif ($ENV{"CVSROOT"}) { + $opt_d = $ENV{"CVSROOT"}; +} else { + usage("CVSROOT needs to be set"); +} +$opt_s ||= "-"; +$opt_a ||= 0; + +my $git_tree = $opt_C; +$git_tree ||= "."; + +my $remote; +if (defined $opt_r) { + $remote = 'refs/remotes/' . 
$opt_r; + $opt_o ||= "master"; +} else { + $opt_o ||= "origin"; + $remote = 'refs/heads'; +} + +my $cvs_tree; +if ($#ARGV == 0) { + $cvs_tree = $ARGV[0]; +} elsif (-f 'CVS/Repository') { + open my $f, '<', 'CVS/Repository' or + die 'Failed to open CVS/Repository'; + $cvs_tree = <$f>; + chomp $cvs_tree; + close $f; +} else { + usage("CVS module has to be specified"); +} + +our @mergerx = (); +if ($opt_m) { + @mergerx = ( qr/\b(?:from|of|merge|merging|merged) ([-\w]+)/i ); +} +if (@opt_M) { + push (@mergerx, map { qr/$_/ } @opt_M); +} + +# Remember UTC of our starting time +# we'll want to avoid importing commits +# that are too recent +our $starttime = time(); + +select(STDERR); $|=1; select(STDOUT); + + +package CVSconn; +# Basic CVS dialog. +# We're only interested in connecting and downloading, so ... + +use File::Spec; +use File::Temp qw(tempfile); +use POSIX qw(strftime dup2); + +sub new { + my ($what,$repo,$subdir) = @_; + $what=ref($what) if ref($what); + + my $self = {}; + $self->{'buffer'} = ""; + bless($self,$what); + + $repo =~ s#/+$##; + $self->{'fullrep'} = $repo; + $self->conn(); + + $self->{'subdir'} = $subdir; + $self->{'lines'} = undef; + + return $self; +} + +sub conn { + my $self = shift; + my $repo = $self->{'fullrep'}; + if ($repo =~ s/^:pserver(?:([^:]*)):(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?//) { + my ($param,$user,$pass,$serv,$port) = ($1,$2,$3,$4,$5); + + my ($proxyhost,$proxyport); + if ($param && ($param =~ m/proxy=([^;]+)/)) { + $proxyhost = $1; + # Default proxyport, if not specified, is 8080. + $proxyport = 8080; + if ($ENV{"CVS_PROXY_PORT"}) { + $proxyport = $ENV{"CVS_PROXY_PORT"}; + } + if ($param =~ m/proxyport=([^;]+)/) { + $proxyport = $1; + } + } + $repo ||= '/'; + + # if username is not explicit in CVSROOT, then use current user, as cvs would + $user=(getlogin() || $ENV{'LOGNAME'} || $ENV{'USER'} || "anonymous") unless $user; + my $rr2 = "-"; + unless ($port) { + $rr2 = ":pserver:$user\@$serv:$repo"; + $port=2401; + } + my $rr = ":pserver:$user\@$serv:$port$repo"; + + if ($pass) { + $pass = $self->_scramble($pass); + } else { + open(H,$ENV{'HOME'}."/.cvspass") and do { + # :pserver:cvs@mea.tmt.tele.fi:/cvsroot/zmailer Ah) { + chomp; + s/^\/\d+\s+//; + my ($w,$p) = split(/\s/,$_,2); + if ($w eq $rr or $w eq $rr2) { + $pass = $p; + last; + } + } + }; + $pass = "A" unless $pass; + } + + my ($s, $rep); + if ($proxyhost) { + + # Use a HTTP Proxy. Only works for HTTP proxies that + # don't require user authentication + # + # See: http://www.ietf.org/rfc/rfc2817.txt + + $s = IO::Socket::INET->new(PeerHost => $proxyhost, PeerPort => $proxyport); + die "Socket to $proxyhost: $!\n" unless defined $s; + $s->write("CONNECT $serv:$port HTTP/1.1\r\nHost: $serv:$port\r\n\r\n") + or die "Write to $proxyhost: $!\n"; + $s->flush(); + + $rep = <$s>; + + # The answer should look like 'HTTP/1.x 2yy ....' + if (!($rep =~ m#^HTTP/1\.. 2[0-9][0-9]#)) { + die "Proxy connect: $rep\n"; + } + # Skip up to the empty line of the proxy server output + # including the response headers. 
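Earlier in this connection routine the `:pserver:` CVSROOT is pulled apart by one dense substitution. A simplified standalone version of that parsing is sketched below; it ignores the `proxy=`/`proxyport=` parameters the real expression also accepts, and the CVSROOT string is just an example:

    use strict;
    use warnings;

    my $cvsroot = ':pserver:anonymous:secret@cvs.example.org:2401/cvsroot/project';
    if ($cvsroot =~ m!^:pserver:(?:([^:\@]+)(?::([^\@]*))?\@)?([^:/]+)(?::(\d+))?(/.*)$!) {
        my ($user, $pass, $serv, $port, $repo) = ($1, $2, $3, $4, $5);
        # Same fallbacks as the real code: current login, then "anonymous".
        $user ||= getlogin() || $ENV{LOGNAME} || $ENV{USER} || 'anonymous';
        $port ||= 2401;    # default pserver port
        print "user=$user server=$serv port=$port repository=$repo\n";
    }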
+ while ($rep = <$s>) { + last if (!defined $rep || + $rep eq "\n" || + $rep eq "\r\n"); + } + } else { + $s = IO::Socket::INET->new(PeerHost => $serv, PeerPort => $port); + die "Socket to $serv: $!\n" unless defined $s; + } + + $s->write("BEGIN AUTH REQUEST\n$repo\n$user\n$pass\nEND AUTH REQUEST\n") + or die "Write to $serv: $!\n"; + $s->flush(); + + $rep = <$s>; + + if ($rep ne "I LOVE YOU\n") { + $rep="" unless $rep; + die "AuthReply: $rep\n"; + } + $self->{'socketo'} = $s; + $self->{'socketi'} = $s; + } else { # local or ext: Fork off our own cvs server. + my $pr = IO::Pipe->new(); + my $pw = IO::Pipe->new(); + my $pid = fork(); + die "Fork: $!\n" unless defined $pid; + my $cvs = 'cvs'; + $cvs = $ENV{CVS_SERVER} if exists $ENV{CVS_SERVER}; + my $rsh = 'rsh'; + $rsh = $ENV{CVS_RSH} if exists $ENV{CVS_RSH}; + + my @cvs = ($cvs, 'server'); + my ($local, $user, $host); + $local = $repo =~ s/:local://; + if (!$local) { + $repo =~ s/:ext://; + $local = !($repo =~ s/^(?:([^\@:]+)\@)?([^:]+)://); + ($user, $host) = ($1, $2); + } + if (!$local) { + if ($user) { + unshift @cvs, $rsh, '-l', $user, $host; + } else { + unshift @cvs, $rsh, $host; + } + } + + unless ($pid) { + $pr->writer(); + $pw->reader(); + dup2($pw->fileno(),0); + dup2($pr->fileno(),1); + $pr->close(); + $pw->close(); + exec(@cvs); + } + $pw->writer(); + $pr->reader(); + $self->{'socketo'} = $pw; + $self->{'socketi'} = $pr; + } + $self->{'socketo'}->write("Root $repo\n"); + + # Trial and error says that this probably is the minimum set + $self->{'socketo'}->write("Valid-responses ok error Valid-requests Mode M Mbinary E Checked-in Created Updated Merged Removed\n"); + + $self->{'socketo'}->write("valid-requests\n"); + $self->{'socketo'}->flush(); + + chomp(my $rep=$self->readline()); + if ($rep !~ s/^Valid-requests\s*//) { + $rep="" unless $rep; + die "Expected Valid-requests from server, but got: $rep\n"; + } + chomp(my $res=$self->readline()); + die "validReply: $res\n" if $res ne "ok"; + + $self->{'socketo'}->write("UseUnchanged\n") if $rep =~ /\bUseUnchanged\b/; + $self->{'repo'} = $repo; +} + +sub readline { + my ($self) = @_; + return $self->{'socketi'}->getline(); +} + +sub _file { + # Request a file with a given revision. + # Trial and error says this is a good way to do it. :-/ + my ($self,$fn,$rev) = @_; + $self->{'socketo'}->write("Argument -N\n") or return undef; + $self->{'socketo'}->write("Argument -P\n") or return undef; + # -kk: Linus' version doesn't use it - defaults to off + if ($opt_k) { + $self->{'socketo'}->write("Argument -kk\n") or return undef; + } + $self->{'socketo'}->write("Argument -r\n") or return undef; + $self->{'socketo'}->write("Argument $rev\n") or return undef; + $self->{'socketo'}->write("Argument --\n") or return undef; + $self->{'socketo'}->write("Argument $self->{'subdir'}/$fn\n") or return undef; + $self->{'socketo'}->write("Directory .\n") or return undef; + $self->{'socketo'}->write("$self->{'repo'}\n") or return undef; + # $self->{'socketo'}->write("Sticky T1.0\n") or return undef; + $self->{'socketo'}->write("co\n") or return undef; + $self->{'socketo'}->flush() or return undef; + $self->{'lines'} = 0; + return 1; +} +sub _line { + # Read a line from the server. + # ... except that 'line' may be an entire file. 
;-) + my ($self, $fh) = @_; + die "Not in lines" unless defined $self->{'lines'}; + + my $line; + my $res=0; + while (defined($line = $self->readline())) { + # M U gnupg-cvs-rep/AUTHORS + # Updated gnupg-cvs-rep/ + # /daten/src/rsync/gnupg-cvs-rep/AUTHORS + # /AUTHORS/1.1///T1.1 + # u=rw,g=rw,o=rw + # 0 + # ok + + if ($line =~ s/^(?:Created|Updated) //) { + $line = $self->readline(); # path + $line = $self->readline(); # Entries line + my $mode = $self->readline(); chomp $mode; + $self->{'mode'} = $mode; + defined (my $cnt = $self->readline()) + or die "EOF from server after 'Changed'\n"; + chomp $cnt; + die "Duh: Filesize $cnt" if $cnt !~ /^\d+$/; + $line=""; + $res = $self->_fetchfile($fh, $cnt); + } elsif ($line =~ s/^ //) { + print $fh $line; + $res += length($line); + } elsif ($line =~ /^M\b/) { + # output, do nothing + } elsif ($line =~ /^Mbinary\b/) { + my $cnt; + die "EOF from server after 'Mbinary'" unless defined ($cnt = $self->readline()); + chomp $cnt; + die "Duh: Mbinary $cnt" if $cnt !~ /^\d+$/ or $cnt<1; + $line=""; + $res += $self->_fetchfile($fh, $cnt); + } else { + chomp $line; + if ($line eq "ok") { + # print STDERR "S: ok (".length($res).")\n"; + return $res; + } elsif ($line =~ s/^E //) { + # print STDERR "S: $line\n"; + } elsif ($line =~ /^(Remove-entry|Removed) /i) { + $line = $self->readline(); # filename + $line = $self->readline(); # OK + chomp $line; + die "Unknown: $line" if $line ne "ok"; + return -1; + } else { + die "Unknown: $line\n"; + } + } + } + return undef; +} +sub file { + my ($self,$fn,$rev) = @_; + my $res; + + my ($fh, $name) = tempfile('gitcvs.XXXXXX', + DIR => File::Spec->tmpdir(), UNLINK => 1); + + $self->_file($fn,$rev) and $res = $self->_line($fh); + + if (!defined $res) { + print STDERR "Server has gone away while fetching $fn $rev, retrying...\n"; + truncate $fh, 0; + $self->conn(); + $self->_file($fn,$rev) or die "No file command send"; + $res = $self->_line($fh); + die "Retry failed" unless defined $res; + } + close ($fh); + + return ($name, $res); +} +sub _fetchfile { + my ($self, $fh, $cnt) = @_; + my $res = 0; + my $bufsize = 1024 * 1024; + while ($cnt) { + if ($bufsize > $cnt) { + $bufsize = $cnt; + } + my $buf; + my $num = $self->{'socketi'}->read($buf,$bufsize); + die "Server: Filesize $cnt: $num: $!\n" if not defined $num or $num<=0; + print $fh $buf; + $res += $num; + $cnt -= $num; + } + return $res; +} + +sub _scramble { + my ($self, $pass) = @_; + my $scrambled = "A"; + + return $scrambled unless $pass; + + my $pass_len = length($pass); + my @pass_arr = split("", $pass); + my $i; + + # from cvs/src/scramble.c + my @shifts = ( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 114,120, 53, 79, 96,109, 72,108, 70, 64, 76, 67,116, 74, 68, 87, + 111, 52, 75,119, 49, 34, 82, 81, 95, 65,112, 86,118,110,122,105, + 41, 57, 83, 43, 46,102, 40, 89, 38,103, 45, 50, 42,123, 91, 35, + 125, 55, 54, 66,124,126, 59, 47, 92, 71,115, 78, 88,107,106, 56, + 36,121,117,104,101,100, 69, 73, 99, 63, 94, 93, 39, 37, 61, 48, + 58,113, 32, 90, 44, 98, 60, 51, 33, 97, 62, 77, 84, 80, 85,223, + 225,216,187,166,229,189,222,188,141,249,148,200,184,136,248,190, + 199,170,181,204,138,232,218,183,255,234,220,247,213,203,226,193, + 174,172,228,252,217,201,131,230,197,211,145,238,161,179,160,212, + 207,221,254,173,202,146,224,151,140,196,205,130,135,133,143,246, + 192,159,244,239,185,168,215,144,139,165,180,157,147,186,214,176, + 227,231,219,169,175,156,206,198,129,164,150,210,154,177,134,127, 
+ 182,128,158,208,162,132,167,209,149,241,153,251,237,236,171,195, + 243,233,253,240,194,250,191,155,142,137,245,235,163,242,178,152 + ); + + for ($i = 0; $i < $pass_len; $i++) { + $scrambled .= pack("C", $shifts[ord($pass_arr[$i])]); + } + + return $scrambled; +} + +package main; + +my $cvs = CVSconn->new($opt_d, $cvs_tree); + + +sub pdate($) { + my ($d) = @_; + m#(\d{2,4})/(\d\d)/(\d\d)\s(\d\d):(\d\d)(?::(\d\d))?# + or die "Unparseable date: $d\n"; + my $y=$1; $y-=1900 if $y>1900; + return timegm($6||0,$5,$4,$3,$2-1,$y); +} + +sub pmode($) { + my ($mode) = @_; + my $m = 0; + my $mm = 0; + my $um = 0; + for my $x(split(//,$mode)) { + if ($x eq ",") { + $m |= $mm&$um; + $mm = 0; + $um = 0; + } elsif ($x eq "u") { $um |= 0700; + } elsif ($x eq "g") { $um |= 0070; + } elsif ($x eq "o") { $um |= 0007; + } elsif ($x eq "r") { $mm |= 0444; + } elsif ($x eq "w") { $mm |= 0222; + } elsif ($x eq "x") { $mm |= 0111; + } elsif ($x eq "=") { # do nothing + } else { die "Unknown mode: $mode\n"; + } + } + $m |= $mm&$um; + return $m; +} + +sub getwd() { + my $pwd = `pwd`; + chomp $pwd; + return $pwd; +} + +sub is_sha1 { + my $s = shift; + return $s =~ /^[a-f0-9]{40}$/; +} + +sub get_headref ($) { + my $name = shift; + my $r = `git rev-parse --verify '$name' 2>/dev/null`; + return undef unless $? == 0; + chomp $r; + return $r; +} + +my $user_filename_prepend = ''; +sub munge_user_filename { + my $name = shift; + return File::Spec->file_name_is_absolute($name) ? + $name : + $user_filename_prepend . $name; +} + +-d $git_tree + or mkdir($git_tree,0777) + or die "Could not create $git_tree: $!"; +if ($git_tree ne '.') { + $user_filename_prepend = getwd() . '/'; + chdir($git_tree); +} + +my $last_branch = ""; +my $orig_branch = ""; +my %branch_date; +my $tip_at_start = undef; + +my $git_dir = $ENV{"GIT_DIR"} || ".git"; +$git_dir = getwd()."/".$git_dir unless $git_dir =~ m#^/#; +$ENV{"GIT_DIR"} = $git_dir; +my $orig_git_index; +$orig_git_index = $ENV{GIT_INDEX_FILE} if exists $ENV{GIT_INDEX_FILE}; + +my %index; # holds filenames of one index per branch + +unless (-d $git_dir) { + system(qw(git init)); + die "Cannot init the GIT db at $git_tree: $?\n" if $?; + system(qw(git read-tree --empty)); + die "Cannot init an empty tree: $?\n" if $?; + + $last_branch = $opt_o; + $orig_branch = ""; +} else { + open(F, "-|", qw(git symbolic-ref HEAD)) or + die "Cannot run git symbolic-ref: $!\n"; + chomp ($last_branch = ); + $last_branch = basename($last_branch); + close(F); + unless ($last_branch) { + warn "Cannot read the last branch name: $! -- assuming 'master'\n"; + $last_branch = "master"; + } + $orig_branch = $last_branch; + $tip_at_start = `git rev-parse --verify HEAD`; + + # Get the last import timestamps + my $fmt = '($ref, $author) = (%(refname), %(author));'; + my @cmd = ('git', 'for-each-ref', '--perl', "--format=$fmt", $remote); + open(H, "-|", @cmd) or die "Cannot run git for-each-ref: $!\n"; + while (defined(my $entry = )) { + my ($ref, $author); + eval($entry) || die "cannot eval refs list: $@"; + my ($head) = ($ref =~ m|^$remote/(.*)|); + $author =~ /^.*\s(\d+)\s[-+]\d{4}$/; + $branch_date{$head} = $1; + } + close(H); + if (!exists $branch_date{$opt_o}) { + die "Branch '$opt_o' does not exist.\n". + "Either use the correct '-o branch' option,\n". 
+ "or import to a new repository.\n"; + } +} + +-d $git_dir + or die "Could not create git subdir ($git_dir).\n"; + +# now we read (and possibly save) author-info as well +-f "$git_dir/cvs-authors" and + read_author_info("$git_dir/cvs-authors"); +if ($opt_A) { + read_author_info(munge_user_filename($opt_A)); + write_author_info("$git_dir/cvs-authors"); +} + +# open .git/cvs-revisions, if requested +open my $revision_map, '>>', "$git_dir/cvs-revisions" + or die "Can't open $git_dir/cvs-revisions for appending: $!\n" + if defined $opt_R; + + +# +# run cvsps into a file unless we are getting +# it passed as a file via $opt_P +# +my $cvspsfile; +unless ($opt_P) { + print "Running cvsps...\n" if $opt_v; + my $pid = open(CVSPS,"-|"); + my $cvspsfh; + die "Cannot fork: $!\n" unless defined $pid; + unless ($pid) { + my @opt; + @opt = split(/,/,$opt_p) if defined $opt_p; + unshift @opt, '-z', $opt_z if defined $opt_z; + unshift @opt, '-q' unless defined $opt_v; + unless (defined($opt_p) && $opt_p =~ m/--no-cvs-direct/) { + push @opt, '--cvs-direct'; + } + exec("cvsps","--norc",@opt,"-u","-A",'--root',$opt_d,$cvs_tree); + die "Could not start cvsps: $!\n"; + } + ($cvspsfh, $cvspsfile) = tempfile('gitXXXXXX', SUFFIX => '.cvsps', + DIR => File::Spec->tmpdir()); + while () { + print $cvspsfh $_; + } + close CVSPS; + $? == 0 or die "git cvsimport: fatal: cvsps reported error\n"; + close $cvspsfh; +} else { + $cvspsfile = munge_user_filename($opt_P); +} + +open(CVS, "<$cvspsfile") or die $!; + +## cvsps output: +#--------------------- +#PatchSet 314 +#Date: 1999/09/18 13:03:59 +#Author: wkoch +#Branch: STABLE-BRANCH-1-0 +#Ancestor branch: HEAD +#Tag: (none) +#Log: +# See ChangeLog: Sat Sep 18 13:03:28 CEST 1999 Werner Koch +#Members: +# README:1.57->1.57.2.1 +# VERSION:1.96->1.96.2.1 +# +#--------------------- + +my $state = 0; + +sub update_index (\@\@) { + my $old = shift; + my $new = shift; + open(my $fh, '|-', qw(git update-index -z --index-info)) + or die "unable to open git update-index: $!"; + print $fh + (map { "0 0000000000000000000000000000000000000000\t$_\0" } + @$old), + (map { '100' . sprintf('%o', $_->[0]) . " $_->[1]\t$_->[2]\0" } + @$new) + or die "unable to write to git update-index: $!"; + close $fh + or die "unable to write to git update-index: $!"; + $? and die "git update-index reported error: $?"; +} + +sub write_tree () { + open(my $fh, '-|', qw(git write-tree)) + or die "unable to open git write-tree: $!"; + chomp(my $tree = <$fh>); + is_sha1($tree) + or die "Cannot get tree id ($tree): $!"; + close($fh) + or die "Error running git write-tree: $?\n"; + print "Tree ID $tree\n" if $opt_v; + return $tree; +} + +my ($patchset,$date,$author_name,$author_email,$branch,$ancestor,$tag,$logmsg); +my (@old,@new,@skipped,%ignorebranch,@commit_revisions); + +# commits that cvsps cannot place anywhere... 
+$ignorebranch{'#CVSPS_NO_BRANCH'} = 1; + +sub commit { + if ($branch eq $opt_o && !$index{branch} && + !get_headref("$remote/$branch")) { + # looks like an initial commit + # use the index primed by git init + $ENV{GIT_INDEX_FILE} = "$git_dir/index"; + $index{$branch} = "$git_dir/index"; + } else { + # use an index per branch to speed up + # imports of projects with many branches + unless ($index{$branch}) { + $index{$branch} = tmpnam(); + $ENV{GIT_INDEX_FILE} = $index{$branch}; + if ($ancestor) { + system("git", "read-tree", "$remote/$ancestor"); + } else { + system("git", "read-tree", "$remote/$branch"); + } + die "read-tree failed: $?\n" if $?; + } + } + $ENV{GIT_INDEX_FILE} = $index{$branch}; + + update_index(@old, @new); + @old = @new = (); + my $tree = write_tree(); + my $parent = get_headref("$remote/$last_branch"); + print "Parent ID " . ($parent ? $parent : "(empty)") . "\n" if $opt_v; + + my @commit_args; + push @commit_args, ("-p", $parent) if $parent; + + # loose detection of merges + # based on the commit msg + foreach my $rx (@mergerx) { + next unless $logmsg =~ $rx && $1; + my $mparent = $1 eq 'HEAD' ? $opt_o : $1; + if (my $sha1 = get_headref("$remote/$mparent")) { + push @commit_args, '-p', "$remote/$mparent"; + print "Merge parent branch: $mparent\n" if $opt_v; + } + } + + my $commit_date = strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)); + $ENV{GIT_AUTHOR_NAME} = $author_name; + $ENV{GIT_AUTHOR_EMAIL} = $author_email; + $ENV{GIT_AUTHOR_DATE} = $commit_date; + $ENV{GIT_COMMITTER_NAME} = $author_name; + $ENV{GIT_COMMITTER_EMAIL} = $author_email; + $ENV{GIT_COMMITTER_DATE} = $commit_date; + my $pid = open2(my $commit_read, my $commit_write, + 'git', 'commit-tree', $tree, @commit_args); + + # compatibility with git2cvs + substr($logmsg,32767) = "" if length($logmsg) > 32767; + $logmsg =~ s/[\s\n]+\z//; + + if (@skipped) { + $logmsg .= "\n\n\nSKIPPED:\n\t"; + $logmsg .= join("\n\t", @skipped) . 
"\n"; + @skipped = (); + } + + print($commit_write "$logmsg\n") && close($commit_write) + or die "Error writing to git commit-tree: $!\n"; + + print "Committed patch $patchset ($branch $commit_date)\n" if $opt_v; + chomp(my $cid = <$commit_read>); + is_sha1($cid) or die "Cannot get commit id ($cid): $!\n"; + print "Commit ID $cid\n" if $opt_v; + close($commit_read); + + waitpid($pid,0); + die "Error running git commit-tree: $?\n" if $?; + + system('git' , 'update-ref', "$remote/$branch", $cid) == 0 + or die "Cannot write branch $branch for update: $!\n"; + + if ($revision_map) { + print $revision_map "@$_ $cid\n" for @commit_revisions; + } + @commit_revisions = (); + + if ($tag) { + my ($xtag) = $tag; + $xtag =~ s/\s+\*\*.*$//; # Remove stuff like ** INVALID ** and ** FUNKY ** + $xtag =~ tr/_/\./ if ( $opt_u ); + $xtag =~ s/[\/]/$opt_s/g; + $xtag =~ s/\[//g; + + system('git' , 'tag', '-f', $xtag, $cid) == 0 + or die "Cannot create tag $xtag: $!\n"; + + print "Created tag '$xtag' on '$branch'\n" if $opt_v; + } +}; + +my $commitcount = 1; +while () { + chomp; + if ($state == 0 and /^-+$/) { + $state = 1; + } elsif ($state == 0) { + $state = 1; + redo; + } elsif (($state==0 or $state==1) and s/^PatchSet\s+//) { + $patchset = 0+$_; + $state=2; + } elsif ($state == 2 and s/^Date:\s+//) { + $date = pdate($_); + unless ($date) { + print STDERR "Could not parse date: $_\n"; + $state=0; + next; + } + $state=3; + } elsif ($state == 3 and s/^Author:\s+//) { + s/\s+$//; + if (/^(.*?)\s+<(.*)>/) { + ($author_name, $author_email) = ($1, $2); + } elsif ($conv_author_name{$_}) { + $author_name = $conv_author_name{$_}; + $author_email = $conv_author_email{$_}; + } else { + $author_name = $author_email = $_; + } + $state = 4; + } elsif ($state == 4 and s/^Branch:\s+//) { + s/\s+$//; + tr/_/\./ if ( $opt_u ); + s/[\/]/$opt_s/g; + $branch = $_; + $state = 5; + } elsif ($state == 5 and s/^Ancestor branch:\s+//) { + s/\s+$//; + $ancestor = $_; + $ancestor = $opt_o if $ancestor eq "HEAD"; + $state = 6; + } elsif ($state == 5) { + $ancestor = undef; + $state = 6; + redo; + } elsif ($state == 6 and s/^Tag:\s+//) { + s/\s+$//; + if ($_ eq "(none)") { + $tag = undef; + } else { + $tag = $_; + } + $state = 7; + } elsif ($state == 7 and /^Log:/) { + $logmsg = ""; + $state = 8; + } elsif ($state == 8 and /^Members:/) { + $branch = $opt_o if $branch eq "HEAD"; + if (defined $branch_date{$branch} and $branch_date{$branch} >= $date) { + # skip + print "skip patchset $patchset: $date before $branch_date{$branch}\n" if $opt_v; + $state = 11; + next; + } + if (!$opt_a && $starttime - 300 - (defined $opt_z ? 
$opt_z : 300) <= $date) { + # skip if the commit is too recent + # given that the cvsps default fuzz is 300s, we give ourselves another + # 300s just in case -- this also prevents skipping commits + # due to server clock drift + print "skip patchset $patchset: $date too recent\n" if $opt_v; + $state = 11; + next; + } + if (exists $ignorebranch{$branch}) { + print STDERR "Skipping $branch\n"; + $state = 11; + next; + } + if ($ancestor) { + if ($ancestor eq $branch) { + print STDERR "Branch $branch erroneously stems from itself -- changed ancestor to $opt_o\n"; + $ancestor = $opt_o; + } + if (defined get_headref("$remote/$branch")) { + print STDERR "Branch $branch already exists!\n"; + $state=11; + next; + } + my $id = get_headref("$remote/$ancestor"); + if (!$id) { + print STDERR "Branch $ancestor does not exist!\n"; + $ignorebranch{$branch} = 1; + $state=11; + next; + } + + system(qw(git update-ref -m cvsimport), + "$remote/$branch", $id); + if($? != 0) { + print STDERR "Could not create branch $branch\n"; + $ignorebranch{$branch} = 1; + $state=11; + next; + } + } + $last_branch = $branch if $branch ne $last_branch; + $state = 9; + } elsif ($state == 8) { + $logmsg .= "$_\n"; + } elsif ($state == 9 and /^\s+(.+?):(INITIAL|\d+(?:\.\d+)+)->(\d+(?:\.\d+)+)\s*$/) { +# VERSION:1.96->1.96.2.1 + my $init = ($2 eq "INITIAL"); + my $fn = $1; + my $rev = $3; + $fn =~ s#^/+##; + if ($opt_S && $fn =~ m/$opt_S/) { + print "SKIPPING $fn v $rev\n"; + push(@skipped, $fn); + next; + } + push @commit_revisions, [$fn, $rev]; + print "Fetching $fn v $rev\n" if $opt_v; + my ($tmpname, $size) = $cvs->file($fn,$rev); + if ($size == -1) { + push(@old,$fn); + print "Drop $fn\n" if $opt_v; + } else { + print "".($init ? "New" : "Update")." $fn: $size bytes\n" if $opt_v; + my $pid = open(my $F, '-|'); + die $! unless defined $pid; + if (!$pid) { + exec("git", "hash-object", "-w", $tmpname) + or die "Cannot create object: $!\n"; + } + my $sha = <$F>; + chomp $sha; + close $F; + my $mode = pmode($cvs->{'mode'}); + push(@new,[$mode, $sha, $fn]); # may be resurrected! + } + unlink($tmpname); + } elsif ($state == 9 and /^\s+(.+?):\d+(?:\.\d+)+->(\d+(?:\.\d+)+)\(DEAD\)\s*$/) { + my $fn = $1; + my $rev = $2; + $fn =~ s#^/+##; + push @commit_revisions, [$fn, $rev]; + push(@old,$fn); + print "Delete $fn\n" if $opt_v; + } elsif ($state == 9 and /^\s*$/) { + $state = 10; + } elsif (($state == 9 or $state == 10) and /^-+$/) { + $commitcount++; + if ($opt_L && $commitcount > $opt_L) { + last; + } + commit(); + if (($commitcount & 1023) == 0) { + system(qw(git repack -a -d)); + } + $state = 1; + } elsif ($state == 11 and /^-+$/) { + $state = 1; + } elsif (/^-+$/) { # end of unknown-line processing + $state = 1; + } elsif ($state != 11) { # ignore stuff when skipping + print STDERR "* UNKNOWN LINE * $_\n"; + } +} +commit() if $branch and $state != 11; + +unless ($opt_P) { + unlink($cvspsfile); +} + +# The heuristic of repacking every 1024 commits can leave a +# lot of unpacked data. If there is more than 1MB worth of +# not-packed objects, repack once more. 
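The long `while` loop above is a line-oriented state machine over the cvsps listing whose shape is documented earlier (PatchSet, Date, Author, Branch, Tag, Log, Members). A heavily reduced sketch of the same idea, driven by an inline sample instead of a cvsps pipe, to show how one patchset's fields are accumulated:

    use strict;
    use warnings;

    my @listing = (
        'PatchSet 314',
        'Date: 1999/09/18 13:03:59',
        'Author: wkoch',
        'Branch: STABLE-BRANCH-1-0',
        'Log:',
        'See ChangeLog: Sat Sep 18 13:03:28 CEST 1999',
        'Members:',
        "\tREADME:1.57->1.57.2.1",
        '---------------------',
    );

    my ($patchset, $date, $author, $branch, $log, @members);
    my $section = '';
    for (@listing) {
        if    (/^PatchSet\s+(\d+)/) { $patchset = $1 }
        elsif (/^Date:\s+(.*)/)     { $date     = $1 }
        elsif (/^Author:\s+(.*)/)   { $author   = $1 }
        elsif (/^Branch:\s+(.*)/)   { $branch   = $1 }
        elsif (/^Log:/)             { $section  = 'log'; $log = '' }
        elsif (/^Members:/)         { $section  = 'members' }
        elsif (/^-+$/)              { last }                      # end of this patchset
        elsif ($section eq 'log')   { $log .= "$_\n" }
        elsif ($section eq 'members' and /^\s+(.+?):(\S+)->(\S+)/) {
            push @members, [$1, $3];                              # file name, new revision
        }
    }
    printf "patchset %d by %s on %s touches %d file(s)\n",
        $patchset, $author, $branch, scalar @members;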
+my $line = `git count-objects`; +if ($line =~ /^(\d+) objects, (\d+) kilobytes$/) { + my ($n_objects, $kb) = ($1, $2); + 1024 < $kb + and system(qw(git repack -a -d)); +} + +foreach my $git_index (values %index) { + if ($git_index ne "$git_dir/index") { + unlink($git_index); + } +} + +if (defined $orig_git_index) { + $ENV{GIT_INDEX_FILE} = $orig_git_index; +} else { + delete $ENV{GIT_INDEX_FILE}; +} + +# Now switch back to the branch we were in before all of this happened +if ($orig_branch) { + print "DONE.\n" if $opt_v; + if ($opt_i) { + exit 0; + } + my $tip_at_end = `git rev-parse --verify HEAD`; + if ($tip_at_start ne $tip_at_end) { + for ($tip_at_start, $tip_at_end) { chomp; } + print "Fetched into the current branch.\n" if $opt_v; + system(qw(git read-tree -u -m), + $tip_at_start, $tip_at_end); + die "Fast-forward update failed: $?\n" if $?; + } + else { + system(qw(git merge cvsimport HEAD), "$remote/$opt_o"); + die "Could not merge $opt_o into the current branch.\n" if $?; + } +} else { + $orig_branch = "master"; + print "DONE; creating $orig_branch branch\n" if $opt_v; + system("git", "update-ref", "refs/heads/master", "$remote/$opt_o") + unless defined get_headref('refs/heads/master'); + system("git", "symbolic-ref", "$remote/HEAD", "$remote/$opt_o") + if ($opt_r && $opt_o ne 'HEAD'); + system('git', 'update-ref', 'HEAD', "$orig_branch"); + unless ($opt_i) { + system(qw(git checkout -f)); + die "checkout failed: $?\n" if $?; + } +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-cvsserver b/SparkleShare/Mac/git/libexec/git-core/git-cvsserver new file mode 100755 index 00000000..49d193f9 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-cvsserver @@ -0,0 +1,3696 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); + +#### +#### This application is a CVS emulation layer for git. +#### It is intended for clients to connect over SSH. +#### See the documentation for more details. +#### +#### Copyright The Open University UK - 2006. +#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### +#### Released under the GNU Public License, version 2. 
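The cleanup just above unlinks one scratch index per imported branch and then restores GIT_INDEX_FILE, because cvsimport builds every branch in its own temporary index rather than in `.git/index`. A minimal sketch of that environment-variable trick in isolation; the scratch path comes from tmpnam() and is purely illustrative:

    use strict;
    use warnings;
    use File::Temp qw(tmpnam);

    my $saved     = $ENV{GIT_INDEX_FILE};   # may legitimately be unset
    my $tmp_index = tmpnam();               # scratch index path (nothing is created yet)
    $ENV{GIT_INDEX_FILE} = $tmp_index;

    # Any read-tree / update-index / write-tree / commit-tree run here now
    # operates on $tmp_index instead of the repository's normal index.

    unlink $tmp_index if -e $tmp_index;
    if (defined $saved) { $ENV{GIT_INDEX_FILE} = $saved }
    else                { delete $ENV{GIT_INDEX_FILE} }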
+#### +#### + +use 5.008; +use strict; +use warnings; +use bytes; + +use Fcntl; +use File::Temp qw/tempdir tempfile/; +use File::Path qw/rmtree/; +use File::Basename; +use Getopt::Long qw(:config require_order no_ignore_case); + +my $VERSION = '1.7.4'; + +my $log = GITCVS::log->new(); +my $cfg; + +my $DATE_LIST = { + Jan => "01", + Feb => "02", + Mar => "03", + Apr => "04", + May => "05", + Jun => "06", + Jul => "07", + Aug => "08", + Sep => "09", + Oct => "10", + Nov => "11", + Dec => "12", +}; + +# Enable autoflush for STDOUT (otherwise the whole thing falls apart) +$| = 1; + +#### Definition and mappings of functions #### + +my $methods = { + 'Root' => \&req_Root, + 'Valid-responses' => \&req_Validresponses, + 'valid-requests' => \&req_validrequests, + 'Directory' => \&req_Directory, + 'Entry' => \&req_Entry, + 'Modified' => \&req_Modified, + 'Unchanged' => \&req_Unchanged, + 'Questionable' => \&req_Questionable, + 'Argument' => \&req_Argument, + 'Argumentx' => \&req_Argument, + 'expand-modules' => \&req_expandmodules, + 'add' => \&req_add, + 'remove' => \&req_remove, + 'co' => \&req_co, + 'update' => \&req_update, + 'ci' => \&req_ci, + 'diff' => \&req_diff, + 'log' => \&req_log, + 'rlog' => \&req_log, + 'tag' => \&req_CATCHALL, + 'status' => \&req_status, + 'admin' => \&req_CATCHALL, + 'history' => \&req_CATCHALL, + 'watchers' => \&req_EMPTY, + 'editors' => \&req_EMPTY, + 'noop' => \&req_EMPTY, + 'annotate' => \&req_annotate, + 'Global_option' => \&req_Globaloption, + #'annotate' => \&req_CATCHALL, +}; + +############################################## + + +# $state holds all the bits of information the clients sends us that could +# potentially be useful when it comes to actually _doing_ something. +my $state = { prependdir => '' }; + +# Work is for managing temporary working directory +my $work = + { + state => undef, # undef, 1 (empty), 2 (with stuff) + workDir => undef, + index => undef, + emptyDir => undef, + tmpDir => undef + }; + +$log->info("--------------- STARTING -----------------"); + +my $usage = + "Usage: git cvsserver [options] [pserver|server] [ ...]\n". + " --base-path : Prepend to requested CVSROOT\n". + " Can be read from GIT_CVSSERVER_BASE_PATH\n". + " --strict-paths : Don't allow recursing into subdirectories\n". + " --export-all : Don't check for gitcvs.enabled in config\n". + " --version, -V : Print version information and exit\n". + " --help, -h, -H : Print usage information and exit\n". + "\n". + " ... is a list of allowed directories. If no directories\n". + "are given, all are allowed. This is an additional restriction, gitcvs\n". + "access still needs to be enabled by the gitcvs.enabled config option.\n". 
+ "Alternately, one directory may be specified in GIT_CVSSERVER_ROOT.\n"; + +my @opts = ( 'help|h|H', 'version|V', + 'base-path=s', 'strict-paths', 'export-all' ); +GetOptions( $state, @opts ) + or die $usage; + +if ($state->{version}) { + print "git-cvsserver version $VERSION\n"; + exit; +} +if ($state->{help}) { + print $usage; + exit; +} + +my $TEMP_DIR = tempdir( CLEANUP => 1 ); +$log->debug("Temporary directory is '$TEMP_DIR'"); + +$state->{method} = 'ext'; +if (@ARGV) { + if ($ARGV[0] eq 'pserver') { + $state->{method} = 'pserver'; + shift @ARGV; + } elsif ($ARGV[0] eq 'server') { + shift @ARGV; + } +} + +# everything else is a directory +$state->{allowed_roots} = [ @ARGV ]; + +# don't export the whole system unless the users requests it +if ($state->{'export-all'} && !@{$state->{allowed_roots}}) { + die "--export-all can only be used together with an explicit whitelist\n"; +} + +# Environment handling for running under git-shell +if (exists $ENV{GIT_CVSSERVER_BASE_PATH}) { + if ($state->{'base-path'}) { + die "Cannot specify base path both ways.\n"; + } + my $base_path = $ENV{GIT_CVSSERVER_BASE_PATH}; + $state->{'base-path'} = $base_path; + $log->debug("Picked up base path '$base_path' from environment.\n"); +} +if (exists $ENV{GIT_CVSSERVER_ROOT}) { + if (@{$state->{allowed_roots}}) { + die "Cannot specify roots both ways: @ARGV\n"; + } + my $allowed_root = $ENV{GIT_CVSSERVER_ROOT}; + $state->{allowed_roots} = [ $allowed_root ]; + $log->debug("Picked up allowed root '$allowed_root' from environment.\n"); +} + +# if we are called with a pserver argument, +# deal with the authentication cat before entering the +# main loop +if ($state->{method} eq 'pserver') { + my $line = ; chomp $line; + unless( $line =~ /^BEGIN (AUTH|VERIFICATION) REQUEST$/) { + die "E Do not understand $line - expecting BEGIN AUTH REQUEST\n"; + } + my $request = $1; + $line = ; chomp $line; + unless (req_Root('root', $line)) { # reuse Root + print "E Invalid root $line \n"; + exit 1; + } + $line = ; chomp $line; + my $user = $line; + $line = ; chomp $line; + my $password = $line; + + if ($user eq 'anonymous') { + # "A" will be 1 byte, use length instead in case the + # encryption method ever changes (yeah, right!) + if (length($password) > 1 ) { + print "E Don't supply a password for the `anonymous' user\n"; + print "I HATE YOU\n"; + exit 1; + } + + # Fall through to LOVE + } else { + # Trying to authenticate a user + if (not exists $cfg->{gitcvs}->{authdb}) { + print "E the repo config file needs a [gitcvs] section with an 'authdb' parameter set to the filename of the authentication database\n"; + print "I HATE YOU\n"; + exit 1; + } + + my $authdb = $cfg->{gitcvs}->{authdb}; + + unless (-e $authdb) { + print "E The authentication database specified in [gitcvs.authdb] does not exist\n"; + print "I HATE YOU\n"; + exit 1; + } + + my $auth_ok; + open my $passwd, "<", $authdb or die $!; + while (<$passwd>) { + if (m{^\Q$user\E:(.*)}) { + if (crypt($user, descramble($password)) eq $1) { + $auth_ok = 1; + } + }; + } + close $passwd; + + unless ($auth_ok) { + print "I HATE YOU\n"; + exit 1; + } + + # Fall through to LOVE + } + + # For checking whether the user is anonymous on commit + $state->{user} = $user; + + $line = ; chomp $line; + unless ($line eq "END $request REQUEST") { + die "E Do not understand $line -- expecting END $request REQUEST\n"; + } + print "I LOVE YOU\n"; + exit if $request eq 'VERIFICATION'; # cvs login + # and now back to our regular programme... 
+} + +# Keep going until the client closes the connection +while () +{ + chomp; + + # Check to see if we've seen this method, and call appropriate function. + if ( /^([\w-]+)(?:\s+(.*))?$/ and defined($methods->{$1}) ) + { + # use the $methods hash to call the appropriate sub for this command + #$log->info("Method : $1"); + &{$methods->{$1}}($1,$2); + } else { + # log fatal because we don't understand this function. If this happens + # we're fairly screwed because we don't know if the client is expecting + # a response. If it is, the client will hang, we'll hang, and the whole + # thing will be custard. + $log->fatal("Don't understand command $_\n"); + die("Unknown command $_"); + } +} + +$log->debug("Processing time : user=" . (times)[0] . " system=" . (times)[1]); +$log->info("--------------- FINISH -----------------"); + +chdir '/'; +exit 0; + +# Magic catchall method. +# This is the method that will handle all commands we haven't yet +# implemented. It simply sends a warning to the log file indicating a +# command that hasn't been implemented has been invoked. +sub req_CATCHALL +{ + my ( $cmd, $data ) = @_; + $log->warn("Unhandled command : req_$cmd : $data"); +} + +# This method invariably succeeds with an empty response. +sub req_EMPTY +{ + print "ok\n"; +} + +# Root pathname \n +# Response expected: no. Tell the server which CVSROOT to use. Note that +# pathname is a local directory and not a fully qualified CVSROOT variable. +# pathname must already exist; if creating a new root, use the init +# request, not Root. pathname does not include the hostname of the server, +# how to access the server, etc.; by the time the CVS protocol is in use, +# connection, authentication, etc., are already taken care of. The Root +# request must be sent only once, and it must be sent before any requests +# other than Valid-responses, valid-requests, UseUnchanged, Set or init. +sub req_Root +{ + my ( $cmd, $data ) = @_; + $log->debug("req_Root : $data"); + + unless ($data =~ m#^/#) { + print "error 1 Root must be an absolute pathname\n"; + return 0; + } + + my $cvsroot = $state->{'base-path'} || ''; + $cvsroot =~ s#/+$##; + $cvsroot .= $data; + + if ($state->{CVSROOT} + && ($state->{CVSROOT} ne $cvsroot)) { + print "error 1 Conflicting roots specified\n"; + return 0; + } + + $state->{CVSROOT} = $cvsroot; + + $ENV{GIT_DIR} = $state->{CVSROOT} . "/"; + + if (@{$state->{allowed_roots}}) { + my $allowed = 0; + foreach my $dir (@{$state->{allowed_roots}}) { + next unless $dir =~ m#^/#; + $dir =~ s#/+$##; + if ($state->{'strict-paths'}) { + if ($ENV{GIT_DIR} =~ m#^\Q$dir\E/?$#) { + $allowed = 1; + last; + } + } elsif ($ENV{GIT_DIR} =~ m#^\Q$dir\E(/?$|/)#) { + $allowed = 1; + last; + } + } + + unless ($allowed) { + print "E $ENV{GIT_DIR} does not seem to be a valid GIT repository\n"; + print "E \n"; + print "error 1 $ENV{GIT_DIR} is not a valid repository\n"; + return 0; + } + } + + unless (-d $ENV{GIT_DIR} && -e $ENV{GIT_DIR}.'HEAD') { + print "E $ENV{GIT_DIR} does not seem to be a valid GIT repository\n"; + print "E \n"; + print "error 1 $ENV{GIT_DIR} is not a valid repository\n"; + return 0; + } + + my @gitvars = `git config -l`; + if ($?) 
{ + print "E problems executing git-config on the server -- this is not a git repository or the PATH is not set correctly.\n"; + print "E \n"; + print "error 1 - problem executing git-config\n"; + return 0; + } + foreach my $line ( @gitvars ) + { + next unless ( $line =~ /^(gitcvs)\.(?:(ext|pserver)\.)?([\w-]+)=(.*)$/ ); + unless ($2) { + $cfg->{$1}{$3} = $4; + } else { + $cfg->{$1}{$2}{$3} = $4; + } + } + + my $enabled = ($cfg->{gitcvs}{$state->{method}}{enabled} + || $cfg->{gitcvs}{enabled}); + unless ($state->{'export-all'} || + ($enabled && $enabled =~ /^\s*(1|true|yes)\s*$/i)) { + print "E GITCVS emulation needs to be enabled on this repo\n"; + print "E the repo config file needs a [gitcvs] section added, and the parameter 'enabled' set to 1\n"; + print "E \n"; + print "error 1 GITCVS emulation disabled\n"; + return 0; + } + + my $logfile = $cfg->{gitcvs}{$state->{method}}{logfile} || $cfg->{gitcvs}{logfile}; + if ( $logfile ) + { + $log->setfile($logfile); + } else { + $log->nofile(); + } + + return 1; +} + +# Global_option option \n +# Response expected: no. Transmit one of the global options `-q', `-Q', +# `-l', `-t', `-r', or `-n'. option must be one of those strings, no +# variations (such as combining of options) are allowed. For graceful +# handling of valid-requests, it is probably better to make new global +# options separate requests, rather than trying to add them to this +# request. +sub req_Globaloption +{ + my ( $cmd, $data ) = @_; + $log->debug("req_Globaloption : $data"); + $state->{globaloptions}{$data} = 1; +} + +# Valid-responses request-list \n +# Response expected: no. Tell the server what responses the client will +# accept. request-list is a space separated list of tokens. +sub req_Validresponses +{ + my ( $cmd, $data ) = @_; + $log->debug("req_Validresponses : $data"); + + # TODO : re-enable this, currently it's not particularly useful + #$state->{validresponses} = [ split /\s+/, $data ]; +} + +# valid-requests \n +# Response expected: yes. Ask the server to send back a Valid-requests +# response. +sub req_validrequests +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_validrequests"); + + $log->debug("SEND : Valid-requests " . join(" ",keys %$methods)); + $log->debug("SEND : ok"); + + print "Valid-requests " . join(" ",keys %$methods) . "\n"; + print "ok\n"; +} + +# Directory local-directory \n +# Additional data: repository \n. Response expected: no. Tell the server +# what directory to use. The repository should be a directory name from a +# previous server response. Note that this both gives a default for Entry +# and Modified and also for ci and the other commands; normal usage is to +# send Directory for each directory in which there will be an Entry or +# Modified, and then a final Directory for the original directory, then the +# command. The local-directory is relative to the top level at which the +# command is occurring (i.e. the last Directory which is sent before the +# command); to indicate that top level, `.' should be sent for +# local-directory. +sub req_Directory +{ + my ( $cmd, $data ) = @_; + + my $repository = ; + chomp $repository; + + + $state->{localdir} = $data; + $state->{repository} = $repository; + $state->{path} = $repository; + $state->{path} =~ s/^\Q$state->{CVSROOT}\E\///; + $state->{module} = $1 if ($state->{path} =~ s/^(.*?)(\/|$)//); + $state->{path} .= "/" if ( $state->{path} =~ /\S/ ); + + $state->{directory} = $state->{localdir}; + $state->{directory} = "" if ( $state->{directory} eq "." 
); + $state->{directory} .= "/" if ( $state->{directory} =~ /\S/ ); + + if ( (not defined($state->{prependdir}) or $state->{prependdir} eq '') and $state->{localdir} eq "." and $state->{path} =~ /\S/ ) + { + $log->info("Setting prepend to '$state->{path}'"); + $state->{prependdir} = $state->{path}; + foreach my $entry ( keys %{$state->{entries}} ) + { + $state->{entries}{$state->{prependdir} . $entry} = $state->{entries}{$entry}; + delete $state->{entries}{$entry}; + } + } + + if ( defined ( $state->{prependdir} ) ) + { + $log->debug("Prepending '$state->{prependdir}' to state|directory"); + $state->{directory} = $state->{prependdir} . $state->{directory} + } + $log->debug("req_Directory : localdir=$data repository=$repository path=$state->{path} directory=$state->{directory} module=$state->{module}"); +} + +# Entry entry-line \n +# Response expected: no. Tell the server what version of a file is on the +# local machine. The name in entry-line is a name relative to the directory +# most recently specified with Directory. If the user is operating on only +# some files in a directory, Entry requests for only those files need be +# included. If an Entry request is sent without Modified, Is-modified, or +# Unchanged, it means the file is lost (does not exist in the working +# directory). If both Entry and one of Modified, Is-modified, or Unchanged +# are sent for the same file, Entry must be sent first. For a given file, +# one can send Modified, Is-modified, or Unchanged, but not more than one +# of these three. +sub req_Entry +{ + my ( $cmd, $data ) = @_; + + #$log->debug("req_Entry : $data"); + + my @data = split(/\//, $data); + + $state->{entries}{$state->{directory}.$data[1]} = { + revision => $data[2], + conflict => $data[3], + options => $data[4], + tag_or_date => $data[5], + }; + + $log->info("Received entry line '$data' => '" . $state->{directory} . $data[1] . "'"); +} + +# Questionable filename \n +# Response expected: no. Additional data: no. Tell the server to check +# whether filename should be ignored, and if not, next time the server +# sends responses, send (in a M response) `?' followed by the directory and +# filename. filename must not contain `/'; it needs to be a file in the +# directory named by the most recent Directory request. +sub req_Questionable +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_Questionable : $data"); + $state->{entries}{$state->{directory}.$data}{questionable} = 1; +} + +# add \n +# Response expected: yes. Add a file or directory. This uses any previous +# Argument, Directory, Entry, or Modified requests, if they have been sent. +# The last Directory sent specifies the working directory at the time of +# the operation. To add a directory, send the directory to be added using +# Directory and Argument requests. 
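An Entry line from the client has the shape `/name/revision/conflict/options/tag_or_date`, and req_Entry above simply splits it on `/` and files the pieces under the current directory. For example (the entry itself is made up):

    use strict;
    use warnings;

    my $data = '/README/1.5//-kb/';          # name, revision, conflict, options, tag/date
    my @data = split(/\//, $data);

    my %entry = (
        revision    => $data[2],
        conflict    => $data[3],
        options     => $data[4],
        tag_or_date => $data[5],
    );
    printf "%s is at revision %s with options '%s'\n",
        $data[1], $entry{revision}, $entry{options};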
+sub req_add +{ + my ( $cmd, $data ) = @_; + + argsplit("add"); + + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + argsfromdir($updater); + + my $addcount = 0; + + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + my $wrev = revparse($filename); + + if ($wrev && $meta && ($wrev < 0)) + { + # previously removed file, add back + $log->info("added file $filename was previously removed, send 1.$meta->{revision}"); + + print "MT +updated\n"; + print "MT text U \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + print "MT -updated\n"; + + unless ( $state->{globaloptions}{-n} ) + { + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + print "Created $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + # transmit file + transmitfile($meta->{filehash}); + } + + next; + } + + unless ( defined ( $state->{entries}{$filename}{modified_filename} ) ) + { + print "E cvs add: nothing known about `$filename'\n"; + next; + } + # TODO : check we're not squashing an already existing file + if ( defined ( $state->{entries}{$filename}{revision} ) ) + { + print "E cvs add: `$filename' has already been entered\n"; + next; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename, 1); + + print "E cvs add: scheduling file `$filename' for addition\n"; + + print "Checked-in $dirpart\n"; + print "$filename\n"; + my $kopts = kopts_from_path($filename,"file", + $state->{entries}{$filename}{modified_filename}); + print "/$filepart/0//$kopts/\n"; + + my $requestedKopts = $state->{opt}{k}; + if(defined($requestedKopts)) + { + $requestedKopts = "-k$requestedKopts"; + } + else + { + $requestedKopts = ""; + } + if( $kopts ne $requestedKopts ) + { + $log->warn("Ignoring requested -k='$requestedKopts'" + . " for '$filename'; detected -k='$kopts' instead"); + #TODO: Also have option to send warning to user? + } + + $addcount++; + } + + if ( $addcount == 1 ) + { + print "E cvs add: use `cvs commit' to add this file permanently\n"; + } + elsif ( $addcount > 1 ) + { + print "E cvs add: use `cvs commit' to add these files permanently\n"; + } + + print "ok\n"; +} + +# remove \n +# Response expected: yes. Remove a file. This uses any previous Argument, +# Directory, Entry, or Modified requests, if they have been sent. The last +# Directory sent specifies the working directory at the time of the +# operation. Note that this request does not actually do anything to the +# repository; the only effect of a successful remove request is to supply +# the client with a new entries line containing `-' to indicate a removed +# file. In fact, the client probably could perform this operation without +# contacting the server, although using remove may cause the server to +# perform a few more checks. The client sends a subsequent ci request to +# actually record the removal in the repository. 
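req_add above asks kopts_from_path() whether to advertise keyword options such as `-kb` for binary content; that helper lies outside this hunk. Purely as a hypothetical stand-in (not the script's actual implementation), a common heuristic is to call anything containing a NUL byte binary:

    use strict;
    use warnings;

    # Hypothetical helper, not git-cvsserver's real kopts_from_path().
    sub guess_kopts {
        my ($content) = @_;
        return index($content, "\0") >= 0 ? '-kb' : '';
    }

    print "text file   -> '", guess_kopts("hello world\n"),       "'\n";
    print "binary file -> '", guess_kopts("\x89PNG\r\n\x1a\x00"), "'\n";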
+sub req_remove +{ + my ( $cmd, $data ) = @_; + + argsplit("remove"); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + #$log->debug("add state : " . Dumper($state)); + + my $rmcount = 0; + + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + if ( defined ( $state->{entries}{$filename}{unchanged} ) or defined ( $state->{entries}{$filename}{modified_filename} ) ) + { + print "E cvs remove: file `$filename' still in working directory\n"; + next; + } + + my $meta = $updater->getmeta($filename); + my $wrev = revparse($filename); + + unless ( defined ( $wrev ) ) + { + print "E cvs remove: nothing known about `$filename'\n"; + next; + } + + if ( defined($wrev) and $wrev < 0 ) + { + print "E cvs remove: file `$filename' already scheduled for removal\n"; + next; + } + + unless ( $wrev == $meta->{revision} ) + { + # TODO : not sure if the format of this message is quite correct. + print "E cvs remove: Up to date check failed for `$filename'\n"; + next; + } + + + my ( $filepart, $dirpart ) = filenamesplit($filename, 1); + + print "E cvs remove: scheduling `$filename' for removal\n"; + + print "Checked-in $dirpart\n"; + print "$filename\n"; + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + print "/$filepart/-1.$wrev//$kopts/\n"; + + $rmcount++; + } + + if ( $rmcount == 1 ) + { + print "E cvs remove: use `cvs commit' to remove this file permanently\n"; + } + elsif ( $rmcount > 1 ) + { + print "E cvs remove: use `cvs commit' to remove these files permanently\n"; + } + + print "ok\n"; +} + +# Modified filename \n +# Response expected: no. Additional data: mode, \n, file transmission. Send +# the server a copy of one locally modified file. filename is a file within +# the most recent directory sent with Directory; it must not contain `/'. +# If the user is operating on only some files in a directory, only those +# files need to be included. This can also be sent without Entry, if there +# is no entry for the file. +sub req_Modified +{ + my ( $cmd, $data ) = @_; + + my $mode = ; + defined $mode + or (print "E end of file reading mode for $data\n"), return; + chomp $mode; + my $size = ; + defined $size + or (print "E end of file reading size of $data\n"), return; + chomp $size; + + # Grab config information + my $blocksize = 8192; + my $bytesleft = $size; + my $tmp; + + # Get a filehandle/name to write it to + my ( $fh, $filename ) = tempfile( DIR => $TEMP_DIR ); + + # Loop over file data writing out to temporary file. + while ( $bytesleft ) + { + $blocksize = $bytesleft if ( $bytesleft < $blocksize ); + read STDIN, $tmp, $blocksize; + print $fh $tmp; + $bytesleft -= $blocksize; + } + + close $fh + or (print "E failed to write temporary, $filename: $!\n"), return; + + # Ensure we have something sensible for the file mode + if ( $mode =~ /u=(\w+)/ ) + { + $mode = $1; + } else { + $mode = "rw"; + } + + # Save the file data in $state + $state->{entries}{$state->{directory}.$data}{modified_filename} = $filename; + $state->{entries}{$state->{directory}.$data}{modified_mode} = $mode; + $state->{entries}{$state->{directory}.$data}{modified_hash} = `git hash-object $filename`; + $state->{entries}{$state->{directory}.$data}{modified_hash} =~ s/\s.*$//s; + + #$log->debug("req_Modified : file=$data mode=$mode size=$size"); +} + +# Unchanged filename \n +# Response expected: no. 
Tell the server that filename has not been +# modified in the checked out directory. The filename is a file within the +# most recent directory sent with Directory; it must not contain `/'. +sub req_Unchanged +{ + my ( $cmd, $data ) = @_; + + $state->{entries}{$state->{directory}.$data}{unchanged} = 1; + + #$log->debug("req_Unchanged : $data"); +} + +# Argument text \n +# Response expected: no. Save argument for use in a subsequent command. +# Arguments accumulate until an argument-using command is given, at which +# point they are forgotten. +# Argumentx text \n +# Response expected: no. Append \n followed by text to the current argument +# being saved. +sub req_Argument +{ + my ( $cmd, $data ) = @_; + + # Argumentx means: append to last Argument (with a newline in front) + + $log->debug("$cmd : $data"); + + if ( $cmd eq 'Argumentx') { + ${$state->{arguments}}[$#{$state->{arguments}}] .= "\n" . $data; + } else { + push @{$state->{arguments}}, $data; + } +} + +# expand-modules \n +# Response expected: yes. Expand the modules which are specified in the +# arguments. Returns the data in Module-expansion responses. Note that the +# server can assume that this is checkout or export, not rtag or rdiff; the +# latter do not access the working directory and thus have no need to +# expand modules on the client side. Expand may not be the best word for +# what this request does. It does not necessarily tell you all the files +# contained in a module, for example. Basically it is a way of telling you +# which working directories the server needs to know about in order to +# handle a checkout of the specified modules. For example, suppose that the +# server has a module defined by +# aliasmodule -a 1dir +# That is, one can check out aliasmodule and it will take 1dir in the +# repository and check it out to 1dir in the working directory. Now suppose +# the client already has this module checked out and is planning on using +# the co request to update it. Without using expand-modules, the client +# would have two bad choices: it could either send information about all +# working directories under the current directory, which could be +# unnecessarily slow, or it could be ignorant of the fact that aliasmodule +# stands for 1dir, and neglect to send information for 1dir, which would +# lead to incorrect operation. With expand-modules, the client would first +# ask for the module to be expanded: +sub req_expandmodules +{ + my ( $cmd, $data ) = @_; + + argsplit(); + + $log->debug("req_expandmodules : " . ( defined($data) ? $data : "[NULL]" ) ); + + unless ( ref $state->{arguments} eq "ARRAY" ) + { + print "ok\n"; + return; + } + + foreach my $module ( @{$state->{arguments}} ) + { + $log->debug("SEND : Module-expansion $module"); + print "Module-expansion $module\n"; + } + + print "ok\n"; + statecleanup(); +} + +# co \n +# Response expected: yes. Get files from the repository. This uses any +# previous Argument, Directory, Entry, or Modified requests, if they have +# been sent. Arguments to this command are module names; the client cannot +# know what directories they correspond to except by (1) just sending the +# co request, and then seeing what directory names the server sends back in +# its responses, and (2) the expand-modules request. +sub req_co +{ + my ( $cmd, $data ) = @_; + + argsplit("co"); + + # Provide list of modules, if -c was used. 
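The Argument/Argumentx pair above is how multi-line arguments (a `-m` log message, for instance) cross the protocol: Argumentx appends its text to the previous argument with a newline in between. The same behaviour, trimmed down and run in isolation:

    use strict;
    use warnings;

    my @arguments;
    sub save_argument {
        my ($cmd, $data) = @_;
        if ($cmd eq 'Argumentx') {
            $arguments[$#arguments] .= "\n" . $data;   # continue the previous argument
        } else {
            push @arguments, $data;
        }
    }

    save_argument('Argument',  '-m');
    save_argument('Argument',  'first line of the log message');
    save_argument('Argumentx', 'second line of the same message');
    print "[$_]\n" for @arguments;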
+ if (exists $state->{opt}{c}) { + my $showref = `git show-ref --heads`; + for my $line (split '\n', $showref) { + if ( $line =~ m% refs/heads/(.*)$% ) { + print "M $1\t$1\n"; + } + } + print "ok\n"; + return 1; + } + + my $module = $state->{args}[0]; + $state->{module} = $module; + my $checkout_path = $module; + + # use the user specified directory if we're given it + $checkout_path = $state->{opt}{d} if ( exists ( $state->{opt}{d} ) ); + + $log->debug("req_co : " . ( defined($data) ? $data : "[NULL]" ) ); + + $log->info("Checking out module '$module' ($state->{CVSROOT}) to '$checkout_path'"); + + $ENV{GIT_DIR} = $state->{CVSROOT} . "/"; + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $module, $log); + $updater->update(); + + $checkout_path =~ s|/$||; # get rid of trailing slashes + + # Eclipse seems to need the Clear-sticky command + # to prepare the 'Entries' file for the new directory. + print "Clear-sticky $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "Clear-static-directory $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "Clear-sticky $checkout_path/\n"; # yes, twice + print $state->{CVSROOT} . "/$module/\n"; + print "Template $checkout_path/\n"; + print $state->{CVSROOT} . "/$module/\n"; + print "0\n"; + + # instruct the client that we're checking out to $checkout_path + print "E cvs checkout: Updating $checkout_path\n"; + + my %seendirs = (); + my $lastdir =''; + + # recursive + sub prepdir { + my ($dir, $repodir, $remotedir, $seendirs) = @_; + my $parent = dirname($dir); + $dir =~ s|/+$||; + $repodir =~ s|/+$||; + $remotedir =~ s|/+$||; + $parent =~ s|/+$||; + $log->debug("announcedir $dir, $repodir, $remotedir" ); + + if ($parent eq '.' || $parent eq './') { + $parent = ''; + } + # recurse to announce unseen parents first + if (length($parent) && !exists($seendirs->{$parent})) { + prepdir($parent, $repodir, $remotedir, $seendirs); + } + # Announce that we are going to modify at the parent level + if ($parent) { + print "E cvs checkout: Updating $remotedir/$parent\n"; + } else { + print "E cvs checkout: Updating $remotedir\n"; + } + print "Clear-sticky $remotedir/$parent/\n"; + print "$repodir/$parent/\n"; + + print "Clear-static-directory $remotedir/$dir/\n"; + print "$repodir/$dir/\n"; + print "Clear-sticky $remotedir/$parent/\n"; # yes, twice + print "$repodir/$parent/\n"; + print "Template $remotedir/$dir/\n"; + print "$repodir/$dir/\n"; + print "0\n"; + + $seendirs->{$dir} = 1; + } + + foreach my $git ( @{$updater->gethead} ) + { + # Don't want to check out deleted files + next if ( $git->{filehash} eq "deleted" ); + + my $fullName = $git->{name}; + ( $git->{name}, $git->{dir} ) = filenamesplit($git->{name}); + + if (length($git->{dir}) && $git->{dir} ne './' + && $git->{dir} ne $lastdir ) { + unless (exists($seendirs{$git->{dir}})) { + prepdir($git->{dir}, $state->{CVSROOT} . "/$module/", + $checkout_path, \%seendirs); + $lastdir = $git->{dir}; + $seendirs{$git->{dir}} = 1; + } + print "E cvs checkout: Updating /$checkout_path/$git->{dir}\n"; + } + + # modification time of this file + print "Mod-time $git->{modified}\n"; + + # print some information to the client + if ( defined ( $git->{dir} ) and $git->{dir} ne "./" ) + { + print "M U $checkout_path/$git->{dir}$git->{name}\n"; + } else { + print "M U $checkout_path/$git->{name}\n"; + } + + # instruct client we're sending a file to put in this path + print "Created $checkout_path/" . 
( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "\n"; + + print $state->{CVSROOT} . "/$module/" . ( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "$git->{name}\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($fullName,"sha1",$git->{filehash}); + print "/$git->{name}/1.$git->{revision}//$kopts/\n"; + # permissions + print "u=$git->{mode},g=$git->{mode},o=$git->{mode}\n"; + + # transmit file + transmitfile($git->{filehash}); + } + + print "ok\n"; + + statecleanup(); +} + +# update \n +# Response expected: yes. Actually do a cvs update command. This uses any +# previous Argument, Directory, Entry, or Modified requests, if they have +# been sent. The last Directory sent specifies the working directory at the +# time of the operation. The -I option is not used--files which the client +# can decide whether to ignore are not mentioned and the client sends the +# Questionable request for others. +sub req_update +{ + my ( $cmd, $data ) = @_; + + $log->debug("req_update : " . ( defined($data) ? $data : "[NULL]" )); + + argsplit("update"); + + # + # It may just be a client exploring the available heads/modules + # in that case, list them as top level directories and leave it + # at that. Eclipse uses this technique to offer you a list of + # projects (heads in this case) to checkout. + # + if ($state->{module} eq '') { + my $showref = `git show-ref --heads`; + print "E cvs update: Updating .\n"; + for my $line (split '\n', $showref) { + if ( $line =~ m% refs/heads/(.*)$% ) { + print "E cvs update: New directory `$1'\n"; + } + } + print "ok\n"; + return 1; + } + + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + + $updater->update(); + + argsfromdir($updater); + + #$log->debug("update state : " . Dumper($state)); + + my $last_dirname = "///"; + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + $log->debug("Processing file $filename"); + + unless ( $state->{globaloptions}{-Q} || $state->{globaloptions}{-q} ) + { + my $cur_dirname = dirname($filename); + if ( $cur_dirname ne $last_dirname ) + { + $last_dirname = $cur_dirname; + if ( $cur_dirname eq "" ) + { + $cur_dirname = "."; + } + print "E cvs update: Updating $cur_dirname\n"; + } + } + + # if we have a -C we should pretend we never saw modified stuff + if ( exists ( $state->{opt}{C} ) ) + { + delete $state->{entries}{$filename}{modified_hash}; + delete $state->{entries}{$filename}{modified_filename}; + $state->{entries}{$filename}{unchanged} = 1; + } + + my $meta; + if ( defined($state->{opt}{r}) and $state->{opt}{r} =~ /^1\.(\d+)/ ) + { + $meta = $updater->getmeta($filename, $1); + } else { + $meta = $updater->getmeta($filename); + } + + # If -p was given, "print" the contents of the requested revision. + if ( exists ( $state->{opt}{p} ) ) { + if ( defined ( $meta->{revision} ) ) { + $log->info("Printing '$filename' revision " . $meta->{revision}); + + transmitfile($meta->{filehash}, { print => 1 }); + } + + next; + } + + if ( ! defined $meta ) + { + $meta = { + name => $filename, + revision => 0, + filehash => 'added' + }; + } + + my $oldmeta = $meta; + + my $wrev = revparse($filename); + + # If the working copy is an old revision, lets get that version too for comparison. 
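+ # Worked example with hypothetical numbers: if the client's Entries line
+ # says the working file is at 1.3 ($wrev == 3) while the head metadata is
+ # at revision 5, $oldmeta is re-fetched for revision 3 below, so local
+ # modifications are compared against what the client actually has rather
+ # than against head.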
+ if ( defined($wrev) and $wrev != $meta->{revision} ) + { + $oldmeta = $updater->getmeta($filename, $wrev); + } + + #$log->debug("Target revision is $meta->{revision}, current working revision is $wrev"); + + # Files are up to date if the working copy and repo copy have the same revision, + # and the working copy is unmodified _and_ the user hasn't specified -C + next if ( defined ( $wrev ) + and defined($meta->{revision}) + and $wrev == $meta->{revision} + and $state->{entries}{$filename}{unchanged} + and not exists ( $state->{opt}{C} ) ); + + # If the working copy and repo copy have the same revision, + # but the working copy is modified, tell the client it's modified + if ( defined ( $wrev ) + and defined($meta->{revision}) + and $wrev == $meta->{revision} + and defined($state->{entries}{$filename}{modified_hash}) + and not exists ( $state->{opt}{C} ) ) + { + $log->info("Tell the client the file is modified"); + print "MT text M \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + } + + if ( $meta->{filehash} eq "deleted" ) + { + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + $log->info("Removing '$filename' from working copy (no longer in the repo)"); + + print "E cvs update: `$filename' is no longer in the repository\n"; + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) { + print "Removed $dirpart\n"; + print "$filepart\n"; + } + } + elsif ( not defined ( $state->{entries}{$filename}{modified_hash} ) + or $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} + or $meta->{filehash} eq 'added' ) + { + # normal update, just send the new revision (either U=Update, + # or A=Add, or R=Remove) + if ( defined($wrev) && $wrev < 0 ) + { + $log->info("Tell the client the file is scheduled for removal"); + print "MT text R \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + } + elsif ( (!defined($wrev) || $wrev == 0) && (!defined($meta->{revision}) || $meta->{revision} == 0) ) + { + $log->info("Tell the client the file is scheduled for addition"); + print "MT text A \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + next; + + } + else { + $log->info("Updating '$filename' to ".$meta->{revision}); + print "MT +updated\n"; + print "MT text U \n"; + print "MT fname $filename\n"; + print "MT newline\n"; + print "MT -updated\n"; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename,1); + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + if ( defined ( $wrev ) ) + { + # instruct client we're sending a file to put in this path as a replacement + print "Update-existing $dirpart\n"; + $log->debug("Updating existing file 'Update-existing $dirpart'"); + } else { + # instruct client we're sending a file to put in this path as a new file + print "Clear-static-directory $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$dirpart\n"; + print "Clear-sticky $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$dirpart\n"; + + $log->debug("Creating new file 'Created $dirpart'"); + print "Created $dirpart\n"; + } + print $state->{CVSROOT} . 
"/$state->{module}/$filename\n"; + + # this is an "entries" line + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + + # transmit file + transmitfile($meta->{filehash}); + } + } else { + $log->info("Updating '$filename'"); + my ( $filepart, $dirpart ) = filenamesplit($meta->{name},1); + + my $mergeDir = setupTmpDir(); + + my $file_local = $filepart . ".mine"; + my $mergedFile = "$mergeDir/$file_local"; + system("ln","-s",$state->{entries}{$filename}{modified_filename}, $file_local); + my $file_old = $filepart . "." . $oldmeta->{revision}; + transmitfile($oldmeta->{filehash}, { targetfile => $file_old }); + my $file_new = $filepart . "." . $meta->{revision}; + transmitfile($meta->{filehash}, { targetfile => $file_new }); + + # we need to merge with the local changes ( M=successful merge, C=conflict merge ) + $log->info("Merging $file_local, $file_old, $file_new"); + print "M Merging differences between 1.$oldmeta->{revision} and 1.$meta->{revision} into $filename\n"; + + $log->debug("Temporary directory for merge is $mergeDir"); + + my $return = system("git", "merge-file", $file_local, $file_old, $file_new); + $return >>= 8; + + cleanupTmpDir(); + + if ( $return == 0 ) + { + $log->info("Merged successfully"); + print "M M $filename\n"; + $log->debug("Merged $dirpart"); + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + print "Merged $dirpart\n"; + $log->debug($state->{CVSROOT} . "/$state->{module}/$filename"); + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + my $kopts = kopts_from_path("$dirpart/$filepart", + "file",$mergedFile); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + } + } + elsif ( $return == 1 ) + { + $log->info("Merged with conflicts"); + print "E cvs update: conflicts found in $filename\n"; + print "M C $filename\n"; + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + print "Merged $dirpart\n"; + print $state->{CVSROOT} . "/$state->{module}/$filename\n"; + my $kopts = kopts_from_path("$dirpart/$filepart", + "file",$mergedFile); + print "/$filepart/1.$meta->{revision}/+/$kopts/\n"; + } + } + else + { + $log->warn("Merge failed"); + next; + } + + # Don't want to actually _DO_ the update if -n specified + unless ( $state->{globaloptions}{-n} ) + { + # permissions + $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); + print "u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}\n"; + + # transmit file, format is single integer on a line by itself (file + # size) followed by the file contents + # TODO : we should copy files in blocks + my $data = `cat $mergedFile`; + $log->debug("File size : " . length($data)); + print length($data) . "\n"; + print $data; + } + } + + } + + print "ok\n"; +} + +sub req_ci +{ + my ( $cmd, $data ) = @_; + + argsplit("ci"); + + #$log->debug("State : " . Dumper($state)); + + $log->info("req_ci : " . ( defined($data) ? $data : "[NULL]" )); + + if ( $state->{method} eq 'pserver' and $state->{user} eq 'anonymous' ) + { + print "error 1 anonymous user cannot commit via pserver\n"; + cleanupWorkTree(); + exit; + } + + if ( -e $state->{CVSROOT} . 
"/index" ) + { + $log->warn("file 'index' already exists in the git repository"); + print "error 1 Index already exists in git repo\n"; + cleanupWorkTree(); + exit; + } + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # Remember where the head was at the beginning. + my $parenthash = `git show-ref -s refs/heads/$state->{module}`; + chomp $parenthash; + if ($parenthash !~ /^[0-9a-f]{40}$/) { + print "error 1 pserver cannot find the current HEAD of module"; + cleanupWorkTree(); + exit; + } + + setupWorkTree($parenthash); + + $log->info("Lockless commit start, basing commit on '$work->{workDir}', index file is '$work->{index}'"); + + $log->info("Created index '$work->{index}' for head $state->{module} - exit status $?"); + + my @committedfiles = (); + my %oldmeta; + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + my $committedfile = $filename; + $filename = filecleanup($filename); + + next unless ( exists $state->{entries}{$filename}{modified_filename} or not $state->{entries}{$filename}{unchanged} ); + + my $meta = $updater->getmeta($filename); + $oldmeta{$filename} = $meta; + + my $wrev = revparse($filename); + + my ( $filepart, $dirpart ) = filenamesplit($filename); + + # do a checkout of the file if it is part of this tree + if ($wrev) { + system('git', 'checkout-index', '-f', '-u', $filename); + unless ($? == 0) { + die "Error running git-checkout-index -f -u $filename : $!"; + } + } + + my $addflag = 0; + my $rmflag = 0; + $rmflag = 1 if ( defined($wrev) and $wrev < 0 ); + $addflag = 1 unless ( -e $filename ); + + # Do up to date checking + unless ( $addflag or $wrev == $meta->{revision} or ( $rmflag and -$wrev == $meta->{revision} ) ) + { + # fail everything if an up to date check fails + print "error 1 Up to date check failed for $filename\n"; + cleanupWorkTree(); + exit; + } + + push @committedfiles, $committedfile; + $log->info("Committing $filename"); + + system("mkdir","-p",$dirpart) unless ( -d $dirpart ); + + unless ( $rmflag ) + { + $log->debug("rename $state->{entries}{$filename}{modified_filename} $filename"); + rename $state->{entries}{$filename}{modified_filename},$filename; + + # Calculate modes to remove + my $invmode = ""; + foreach ( qw (r w x) ) { $invmode .= $_ unless ( $state->{entries}{$filename}{modified_mode} =~ /$_/ ); } + + $log->debug("chmod u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode . " $filename"); + system("chmod","u+" . $state->{entries}{$filename}{modified_mode} . "-" . $invmode, $filename); + } + + if ( $rmflag ) + { + $log->info("Removing file '$filename'"); + unlink($filename); + system("git", "update-index", "--remove", $filename); + } + elsif ( $addflag ) + { + $log->info("Adding file '$filename'"); + system("git", "update-index", "--add", $filename); + } else { + $log->info("Updating file '$filename'"); + system("git", "update-index", $filename); + } + } + + unless ( scalar(@committedfiles) > 0 ) + { + print "E No files to commit\n"; + print "ok\n"; + cleanupWorkTree(); + return; + } + + my $treehash = `git write-tree`; + chomp $treehash; + + $log->debug("Treehash : $treehash, Parenthash : $parenthash"); + + # write our commit message out if we have one ... 
+ my ( $msg_fh, $msg_filename ) = tempfile( DIR => $TEMP_DIR ); + print $msg_fh $state->{opt}{m};# if ( exists ( $state->{opt}{m} ) ); + if ( defined ( $cfg->{gitcvs}{commitmsgannotation} ) ) { + if ($cfg->{gitcvs}{commitmsgannotation} !~ /^\s*$/ ) { + print $msg_fh "\n\n".$cfg->{gitcvs}{commitmsgannotation}."\n" + } + } else { + print $msg_fh "\n\nvia git-CVS emulator\n"; + } + close $msg_fh; + + my $commithash = `git commit-tree $treehash -p $parenthash < $msg_filename`; + chomp($commithash); + $log->info("Commit hash : $commithash"); + + unless ( $commithash =~ /[a-zA-Z0-9]{40}/ ) + { + $log->warn("Commit failed (Invalid commit hash)"); + print "error 1 Commit failed (unknown reason)\n"; + cleanupWorkTree(); + exit; + } + + ### Emulate git-receive-pack by running hooks/update + my @hook = ( $ENV{GIT_DIR}.'hooks/update', "refs/heads/$state->{module}", + $parenthash, $commithash ); + if( -x $hook[0] ) { + unless( system( @hook ) == 0 ) + { + $log->warn("Commit failed (update hook declined to update ref)"); + print "error 1 Commit failed (update hook declined)\n"; + cleanupWorkTree(); + exit; + } + } + + ### Update the ref + if (system(qw(git update-ref -m), "cvsserver ci", + "refs/heads/$state->{module}", $commithash, $parenthash)) { + $log->warn("update-ref for $state->{module} failed."); + print "error 1 Cannot commit -- update first\n"; + cleanupWorkTree(); + exit; + } + + ### Emulate git-receive-pack by running hooks/post-receive + my $hook = $ENV{GIT_DIR}.'hooks/post-receive'; + if( -x $hook ) { + open(my $pipe, "| $hook") || die "can't fork $!"; + + local $SIG{PIPE} = sub { die 'pipe broke' }; + + print $pipe "$parenthash $commithash refs/heads/$state->{module}\n"; + + close $pipe || die "bad pipe: $! $?"; + } + + $updater->update(); + + ### Then hooks/post-update + $hook = $ENV{GIT_DIR}.'hooks/post-update'; + if (-x $hook) { + system($hook, "refs/heads/$state->{module}"); + } + + # foreach file specified on the command line ... + foreach my $filename ( @committedfiles ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + unless (defined $meta->{revision}) { + $meta->{revision} = 1; + } + + my ( $filepart, $dirpart ) = filenamesplit($filename, 1); + + $log->debug("Checked-in $dirpart : $filename"); + + print "M $state->{CVSROOT}/$state->{module}/$filename,v <-- $dirpart$filepart\n"; + if ( defined $meta->{filehash} && $meta->{filehash} eq "deleted" ) + { + print "M new revision: delete; previous revision: 1.$oldmeta{$filename}{revision}\n"; + print "Remove-entry $dirpart\n"; + print "$filename\n"; + } else { + if ($meta->{revision} == 1) { + print "M initial revision: 1.1\n"; + } else { + print "M new revision: 1.$meta->{revision}; previous revision: 1.$oldmeta{$filename}{revision}\n"; + } + print "Checked-in $dirpart\n"; + print "$filename\n"; + my $kopts = kopts_from_path($filename,"sha1",$meta->{filehash}); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; + } + } + + cleanupWorkTree(); + print "ok\n"; +} + +sub req_status +{ + my ( $cmd, $data ) = @_; + + argsplit("status"); + + $log->info("req_status : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... 
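+ # Example of the block this loop emits for one file, assuming a
+ # hypothetical foo.c whose working revision 1.2 lags behind 1.3 in the
+ # repository (Sticky lines and a trailing "M" follow as in the code below):
+ #   M ===================================================================
+ #   M File: foo.c           Status: Needs Checkout
+ #   M Working revision:     1.2
+ #   M Repository revision:  1.3     <CVSROOT>/<module>/foo.c,v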
+ foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + next if exists($state->{opt}{l}) && index($filename, '/', length($state->{prependdir})) >= 0; + + my $meta = $updater->getmeta($filename); + my $oldmeta = $meta; + + my $wrev = revparse($filename); + + # If the working copy is an old revision, lets get that version too for comparison. + if ( defined($wrev) and $wrev != $meta->{revision} ) + { + $oldmeta = $updater->getmeta($filename, $wrev); + } + + # TODO : All possible statuses aren't yet implemented + my $status; + # Files are up to date if the working copy and repo copy have the same revision, and the working copy is unmodified + $status = "Up-to-date" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision} + and + ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) ) + or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta->{filehash} ) ) + ); + + # Need checkout if the working copy has an older revision than the repo copy, and the working copy is unmodified + $status ||= "Needs Checkout" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev + and + ( $state->{entries}{$filename}{unchanged} + or ( defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $oldmeta->{filehash} ) ) + ); + + # Need checkout if it exists in the repo but doesn't have a working copy + $status ||= "Needs Checkout" if ( not defined ( $wrev ) and defined ( $meta->{revision} ) ); + + # Locally modified if working copy and repo copy have the same revision but there are local changes + $status ||= "Locally Modified" if ( defined ( $wrev ) and defined($meta->{revision}) and $wrev == $meta->{revision} and $state->{entries}{$filename}{modified_filename} ); + + # Needs Merge if working copy revision is less than repo copy and there are local changes + $status ||= "Needs Merge" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and $meta->{revision} > $wrev and $state->{entries}{$filename}{modified_filename} ); + + $status ||= "Locally Added" if ( defined ( $state->{entries}{$filename}{revision} ) and not defined ( $meta->{revision} ) ); + $status ||= "Locally Removed" if ( defined ( $wrev ) and defined ( $meta->{revision} ) and -$wrev == $meta->{revision} ); + $status ||= "Unresolved Conflict" if ( defined ( $state->{entries}{$filename}{conflict} ) and $state->{entries}{$filename}{conflict} =~ /^\+=/ ); + $status ||= "File had conflicts on merge" if ( 0 ); + + $status ||= "Unknown"; + + my ($filepart) = filenamesplit($filename); + + print "M ===================================================================\n"; + print "M File: $filepart\tStatus: $status\n"; + if ( defined($state->{entries}{$filename}{revision}) ) + { + print "M Working revision:\t" . $state->{entries}{$filename}{revision} . "\n"; + } else { + print "M Working revision:\tNo entry for $filename\n"; + } + if ( defined($meta->{revision}) ) + { + print "M Repository revision:\t1." . $meta->{revision} . 
"\t$state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M Sticky Tag:\t\t(none)\n"; + print "M Sticky Date:\t\t(none)\n"; + print "M Sticky Options:\t\t(none)\n"; + } else { + print "M Repository revision:\tNo revision control file\n"; + } + print "M\n"; + } + + print "ok\n"; +} + +sub req_diff +{ + my ( $cmd, $data ) = @_; + + argsplit("diff"); + + $log->debug("req_diff : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + my ($revision1, $revision2); + if ( defined ( $state->{opt}{r} ) and ref $state->{opt}{r} eq "ARRAY" ) + { + $revision1 = $state->{opt}{r}[0]; + $revision2 = $state->{opt}{r}[1]; + } else { + $revision1 = $state->{opt}{r}; + } + + $revision1 =~ s/^1\.// if ( defined ( $revision1 ) ); + $revision2 =~ s/^1\.// if ( defined ( $revision2 ) ); + + $log->debug("Diffing revisions " . ( defined($revision1) ? $revision1 : "[NULL]" ) . " and " . ( defined($revision2) ? $revision2 : "[NULL]" ) ); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my ( $fh, $file1, $file2, $meta1, $meta2, $filediff ); + + my $wrev = revparse($filename); + + # We need _something_ to diff against + next unless ( defined ( $wrev ) ); + + # if we have a -r switch, use it + if ( defined ( $revision1 ) ) + { + ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta1 = $updater->getmeta($filename, $revision1); + unless ( defined ( $meta1 ) and $meta1->{filehash} ne "deleted" ) + { + print "E File $filename at revision 1.$revision1 doesn't exist\n"; + next; + } + transmitfile($meta1->{filehash}, { targetfile => $file1 }); + } + # otherwise we just use the working copy revision + else + { + ( undef, $file1 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta1 = $updater->getmeta($filename, $wrev); + transmitfile($meta1->{filehash}, { targetfile => $file1 }); + } + + # if we have a second -r switch, use it too + if ( defined ( $revision2 ) ) + { + ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta2 = $updater->getmeta($filename, $revision2); + + unless ( defined ( $meta2 ) and $meta2->{filehash} ne "deleted" ) + { + print "E File $filename at revision 1.$revision2 doesn't exist\n"; + next; + } + + transmitfile($meta2->{filehash}, { targetfile => $file2 }); + } + # otherwise we just use the working copy + else + { + $file2 = $state->{entries}{$filename}{modified_filename}; + } + + # if we have been given -r, and we don't have a $file2 yet, lets get one + if ( defined ( $revision1 ) and not defined ( $file2 ) ) + { + ( undef, $file2 ) = tempfile( DIR => $TEMP_DIR, OPEN => 0 ); + $meta2 = $updater->getmeta($filename, $wrev); + transmitfile($meta2->{filehash}, { targetfile => $file2 }); + } + + # We need to have retrieved something useful + next unless ( defined ( $meta1 ) ); + + # Files to date if the working copy and repo copy have the same revision, and the working copy is unmodified + next if ( not defined ( $meta2 ) and $wrev == $meta1->{revision} + and + ( ( $state->{entries}{$filename}{unchanged} and ( not defined ( $state->{entries}{$filename}{conflict} ) or $state->{entries}{$filename}{conflict} !~ /^\+=/ ) ) + or ( 
defined($state->{entries}{$filename}{modified_hash}) and $state->{entries}{$filename}{modified_hash} eq $meta1->{filehash} ) ) + ); + + # Apparently we only show diffs for locally modified files + next unless ( defined($meta2) or defined ( $state->{entries}{$filename}{modified_filename} ) ); + + print "M Index: $filename\n"; + print "M ===================================================================\n"; + print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M retrieving revision 1.$meta1->{revision}\n" if ( defined ( $meta1 ) ); + print "M retrieving revision 1.$meta2->{revision}\n" if ( defined ( $meta2 ) ); + print "M diff "; + foreach my $opt ( keys %{$state->{opt}} ) + { + if ( ref $state->{opt}{$opt} eq "ARRAY" ) + { + foreach my $value ( @{$state->{opt}{$opt}} ) + { + print "-$opt $value "; + } + } else { + print "-$opt "; + print "$state->{opt}{$opt} " if ( defined ( $state->{opt}{$opt} ) ); + } + } + print "$filename\n"; + + $log->info("Diffing $filename -r $meta1->{revision} -r " . ( $meta2->{revision} or "workingcopy" )); + + ( $fh, $filediff ) = tempfile ( DIR => $TEMP_DIR ); + + if ( exists $state->{opt}{u} ) + { + system("diff -u -L '$filename revision 1.$meta1->{revision}' -L '$filename " . ( defined($meta2->{revision}) ? "revision 1.$meta2->{revision}" : "working copy" ) . "' $file1 $file2 > $filediff"); + } else { + system("diff $file1 $file2 > $filediff"); + } + + while ( <$fh> ) + { + print "M $_"; + } + close $fh; + } + + print "ok\n"; +} + +sub req_log +{ + my ( $cmd, $data ) = @_; + + argsplit("log"); + + $log->debug("req_log : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("log state : " . Dumper($state)); + + my ( $minrev, $maxrev ); + if ( defined ( $state->{opt}{r} ) and $state->{opt}{r} =~ /([\d.]+)?(::?)([\d.]+)?/ ) + { + my $control = $2; + $minrev = $1; + $maxrev = $3; + $minrev =~ s/^1\.// if ( defined ( $minrev ) ); + $maxrev =~ s/^1\.// if ( defined ( $maxrev ) ); + $minrev++ if ( defined($minrev) and $control eq "::" ); + } + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing status on ... + argsfromdir($updater); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $headmeta = $updater->getmeta($filename); + + my $revisions = $updater->getlog($filename); + my $totalrevisions = scalar(@$revisions); + + if ( defined ( $minrev ) ) + { + $log->debug("Removing revisions less than $minrev"); + while ( scalar(@$revisions) > 0 and $revisions->[-1]{revision} < $minrev ) + { + pop @$revisions; + } + } + if ( defined ( $maxrev ) ) + { + $log->debug("Removing revisions greater than $maxrev"); + while ( scalar(@$revisions) > 0 and $revisions->[0]{revision} > $maxrev ) + { + shift @$revisions; + } + } + + next unless ( scalar(@$revisions) ); + + print "M \n"; + print "M RCS file: $state->{CVSROOT}/$state->{module}/$filename,v\n"; + print "M Working file: $filename\n"; + print "M head: 1.$headmeta->{revision}\n"; + print "M branch:\n"; + print "M locks: strict\n"; + print "M access list:\n"; + print "M symbolic names:\n"; + print "M keyword substitution: kv\n"; + print "M total revisions: $totalrevisions;\tselected revisions: " . scalar(@$revisions) . 
"\n"; + print "M description:\n"; + + foreach my $revision ( @$revisions ) + { + print "M ----------------------------\n"; + print "M revision 1.$revision->{revision}\n"; + # reformat the date for log output + $revision->{modified} = sprintf('%04d/%02d/%02d %s', $3, $DATE_LIST->{$2}, $1, $4 ) if ( $revision->{modified} =~ /(\d+)\s+(\w+)\s+(\d+)\s+(\S+)/ and defined($DATE_LIST->{$2}) ); + $revision->{author} = cvs_author($revision->{author}); + print "M date: $revision->{modified}; author: $revision->{author}; state: " . ( $revision->{filehash} eq "deleted" ? "dead" : "Exp" ) . "; lines: +2 -3\n"; + my $commitmessage = $updater->commitmessage($revision->{commithash}); + $commitmessage =~ s/^/M /mg; + print $commitmessage . "\n"; + } + print "M =============================================================================\n"; + } + + print "ok\n"; +} + +sub req_annotate +{ + my ( $cmd, $data ) = @_; + + argsplit("annotate"); + + $log->info("req_annotate : " . ( defined($data) ? $data : "[NULL]" )); + #$log->debug("status state : " . Dumper($state)); + + # Grab a handle to the SQLite db and do any necessary updates + my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); + $updater->update(); + + # if no files were specified, we need to work out what files we should be providing annotate on ... + argsfromdir($updater); + + # we'll need a temporary checkout dir + setupWorkTree(); + + $log->info("Temp checkoutdir creation successful, basing annotate session work on '$work->{workDir}', index file is '$ENV{GIT_INDEX_FILE}'"); + + # foreach file specified on the command line ... + foreach my $filename ( @{$state->{args}} ) + { + $filename = filecleanup($filename); + + my $meta = $updater->getmeta($filename); + + next unless ( $meta->{revision} ); + + # get all the commits that this file was in + # in dense format -- aka skip dead revisions + my $revisions = $updater->gethistorydense($filename); + my $lastseenin = $revisions->[0][2]; + + # populate the temporary index based on the latest commit were we saw + # the file -- but do it cheaply without checking out any files + # TODO: if we got a revision from the client, use that instead + # to look up the commithash in sqlite (still good to default to + # the current head as we do now) + system("git", "read-tree", $lastseenin); + unless ($? == 0) + { + print "E error running git-read-tree $lastseenin $ENV{GIT_INDEX_FILE} $!\n"; + return; + } + $log->info("Created index '$ENV{GIT_INDEX_FILE}' with commit $lastseenin - exit status $?"); + + # do a checkout of the file + system('git', 'checkout-index', '-f', '-u', $filename); + unless ($? == 0) { + print "E error running git-checkout-index -f -u $filename : $!\n"; + return; + } + + $log->info("Annotate $filename"); + + # Prepare a file with the commits from the linearized + # history that annotate should know about. This prevents + # git-jsannotate telling us about commits we are hiding + # from the client. + + my $a_hints = "$work->{workDir}/.annotate_hints"; + if (!open(ANNOTATEHINTS, '>', $a_hints)) { + print "E failed to open '$a_hints' for writing: $!\n"; + return; + } + for (my $i=0; $i < @$revisions; $i++) + { + print ANNOTATEHINTS $revisions->[$i][2]; + if ($i+1 < @$revisions) { # have we got a parent? + print ANNOTATEHINTS ' ' . 
$revisions->[$i+1][2];
+ }
+ print ANNOTATEHINTS "\n";
+ }
+
+ print ANNOTATEHINTS "\n";
+ close ANNOTATEHINTS
+ or (print "E failed to write $a_hints: $!\n"), return;
+
+ my @cmd = (qw(git annotate -l -S), $a_hints, $filename);
+ if (!open(ANNOTATE, "-|", @cmd)) {
+ print "E error invoking ". join(' ',@cmd) .": $!\n";
+ return;
+ }
+ my $metadata = {};
+ print "E Annotations for $filename\n";
+ print "E ***************\n";
+ while ( <ANNOTATE> )
+ {
+ if (m/^([a-zA-Z0-9]{40})\t\([^\)]*\)(.*)$/i)
+ {
+ my $commithash = $1;
+ my $data = $2;
+ unless ( defined ( $metadata->{$commithash} ) )
+ {
+ $metadata->{$commithash} = $updater->getmeta($filename, $commithash);
+ $metadata->{$commithash}{author} = cvs_author($metadata->{$commithash}{author});
+ $metadata->{$commithash}{modified} = sprintf("%02d-%s-%02d", $1, $2, $3) if ( $metadata->{$commithash}{modified} =~ /^(\d+)\s(\w+)\s\d\d(\d\d)/ );
+ }
+ printf("M 1.%-5d (%-8s %10s): %s\n",
+ $metadata->{$commithash}{revision},
+ $metadata->{$commithash}{author},
+ $metadata->{$commithash}{modified},
+ $data
+ );
+ } else {
+ $log->warn("Error in annotate output! LINE: $_");
+ print "E Annotate error \n";
+ next;
+ }
+ }
+ close ANNOTATE;
+ }
+
+ # done; get out of the tempdir
+ cleanupWorkTree();
+
+ print "ok\n";
+
+}
+
+# This method takes the state->{arguments} array and produces two new arrays.
+# The first is $state->{args} which is everything before the '--' argument, and
+# the second is $state->{files} which is everything after it.
+sub argsplit
+{
+ $state->{args} = [];
+ $state->{files} = [];
+ $state->{opt} = {};
+
+ return unless( defined($state->{arguments}) and ref $state->{arguments} eq "ARRAY" );
+
+ my $type = shift;
+
+ if ( defined($type) )
+ {
+ my $opt = {};
+ $opt = { A => 0, N => 0, P => 0, R => 0, c => 0, f => 0, l => 0, n => 0, p => 0, s => 0, r => 1, D => 1, d => 1, k => 1, j => 1, } if ( $type eq "co" );
+ $opt = { v => 0, l => 0, R => 0 } if ( $type eq "status" );
+ $opt = { A => 0, P => 0, C => 0, d => 0, f => 0, l => 0, R => 0, p => 0, k => 1, r => 1, D => 1, j => 1, I => 1, W => 1 } if ( $type eq "update" );
+ $opt = { l => 0, R => 0, k => 1, D => 1, D => 1, r => 2 } if ( $type eq "diff" );
+ $opt = { c => 0, R => 0, l => 0, f => 0, F => 1, m => 1, r => 1 } if ( $type eq "ci" );
+ $opt = { k => 1, m => 1 } if ( $type eq "add" );
+ $opt = { f => 0, l => 0, R => 0 } if ( $type eq "remove" );
+ $opt = { l => 0, b => 0, h => 0, R => 0, t => 0, N => 0, S => 0, r => 1, d => 1, s => 1, w => 1 } if ( $type eq "log" );
+
+
+ while ( scalar ( @{$state->{arguments}} ) > 0 )
+ {
+ my $arg = shift @{$state->{arguments}};
+
+ next if ( $arg eq "--" );
+ next unless ( $arg =~ /\S/ );
+
+ # if the argument looks like a switch
+ if ( $arg =~ /^-(\w)(.*)/ )
+ {
+ # if it's a switch that takes an argument
+ if ( $opt->{$1} )
+ {
+ # If this switch has already been provided
+ if ( $opt->{$1} > 1 and exists ( $state->{opt}{$1} ) )
+ {
+ $state->{opt}{$1} = [ $state->{opt}{$1} ];
+ if ( length($2) > 0 )
+ {
+ push @{$state->{opt}{$1}},$2;
+ } else {
+ push @{$state->{opt}{$1}}, shift @{$state->{arguments}};
+ }
+ } else {
+ # if there's extra data in the arg, use that as the argument for the switch
+ if ( length($2) > 0 )
+ {
+ $state->{opt}{$1} = $2;
+ } else {
+ $state->{opt}{$1} = shift @{$state->{arguments}};
+ }
+ }
+ } else {
+ $state->{opt}{$1} = undef;
+ }
+ }
+ else
+ {
+ push @{$state->{args}}, $arg;
+ }
+ }
+ }
+ else
+ {
+ my $mode = 0;
+
+ foreach my $value ( @{$state->{arguments}} )
+ {
+ if ( $value eq "--" )
+ {
+ $mode++;
+ next;
} + push @{$state->{args}}, $value if ( $mode == 0 ); + push @{$state->{files}}, $value if ( $mode == 1 ); + } + } +} + +# This method uses $state->{directory} to populate $state->{args} with a list of filenames +sub argsfromdir +{ + my $updater = shift; + + $state->{args} = [] if ( scalar(@{$state->{args}}) == 1 and $state->{args}[0] eq "." ); + + return if ( scalar ( @{$state->{args}} ) > 1 ); + + my @gethead = @{$updater->gethead}; + + # push added files + foreach my $file (keys %{$state->{entries}}) { + if ( exists $state->{entries}{$file}{revision} && + $state->{entries}{$file}{revision} == 0 ) + { + push @gethead, { name => $file, filehash => 'added' }; + } + } + + if ( scalar(@{$state->{args}}) == 1 ) + { + my $arg = $state->{args}[0]; + $arg .= $state->{prependdir} if ( defined ( $state->{prependdir} ) ); + + $log->info("Only one arg specified, checking for directory expansion on '$arg'"); + + foreach my $file ( @gethead ) + { + next if ( $file->{filehash} eq "deleted" and not defined ( $state->{entries}{$file->{name}} ) ); + next unless ( $file->{name} =~ /^$arg\// or $file->{name} eq $arg ); + push @{$state->{args}}, $file->{name}; + } + + shift @{$state->{args}} if ( scalar(@{$state->{args}}) > 1 ); + } else { + $log->info("Only one arg specified, populating file list automatically"); + + $state->{args} = []; + + foreach my $file ( @gethead ) + { + next if ( $file->{filehash} eq "deleted" and not defined ( $state->{entries}{$file->{name}} ) ); + next unless ( $file->{name} =~ s/^$state->{prependdir}// ); + push @{$state->{args}}, $file->{name}; + } + } +} + +# This method cleans up the $state variable after a command that uses arguments has run +sub statecleanup +{ + $state->{files} = []; + $state->{args} = []; + $state->{arguments} = []; + $state->{entries} = {}; +} + +sub revparse +{ + my $filename = shift; + + return undef unless ( defined ( $state->{entries}{$filename}{revision} ) ); + + return $1 if ( $state->{entries}{$filename}{revision} =~ /^1\.(\d+)/ ); + return -$1 if ( $state->{entries}{$filename}{revision} =~ /^-1\.(\d+)/ ); + + return undef; +} + +# This method takes a file hash and does a CVS "file transfer". Its +# exact behaviour depends on a second, optional hash table argument: +# - If $options->{targetfile}, dump the contents to that file; +# - If $options->{print}, use M/MT to transmit the contents one line +# at a time; +# - Otherwise, transmit the size of the file, followed by the file +# contents. 
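+#
+# Hypothetical invocations matching the three option forms above (the
+# variables are placeholders, not part of this file):
+#   transmitfile($hash);                              # size line, then raw contents
+#   transmitfile($hash, { print => 1 });              # stream via M/MT lines
+#   transmitfile($hash, { targetfile => $tmpname });  # dump into $tmpname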
+sub transmitfile +{ + my $filehash = shift; + my $options = shift; + + if ( defined ( $filehash ) and $filehash eq "deleted" ) + { + $log->warn("filehash is 'deleted'"); + return; + } + + die "Need filehash" unless ( defined ( $filehash ) and $filehash =~ /^[a-zA-Z0-9]{40}$/ ); + + my $type = `git cat-file -t $filehash`; + chomp $type; + + die ( "Invalid type '$type' (expected 'blob')" ) unless ( defined ( $type ) and $type eq "blob" ); + + my $size = `git cat-file -s $filehash`; + chomp $size; + + $log->debug("transmitfile($filehash) size=$size, type=$type"); + + if ( open my $fh, '-|', "git", "cat-file", "blob", $filehash ) + { + if ( defined ( $options->{targetfile} ) ) + { + my $targetfile = $options->{targetfile}; + open NEWFILE, ">", $targetfile or die("Couldn't open '$targetfile' for writing : $!"); + print NEWFILE $_ while ( <$fh> ); + close NEWFILE or die("Failed to write '$targetfile': $!"); + } elsif ( defined ( $options->{print} ) && $options->{print} ) { + while ( <$fh> ) { + if( /\n\z/ ) { + print 'M ', $_; + } else { + print 'MT text ', $_, "\n"; + } + } + } else { + print "$size\n"; + print while ( <$fh> ); + } + close $fh or die ("Couldn't close filehandle for transmitfile(): $!"); + } else { + die("Couldn't execute git-cat-file"); + } +} + +# This method takes a file name, and returns ( $dirpart, $filepart ) which +# refers to the directory portion and the file portion of the filename +# respectively +sub filenamesplit +{ + my $filename = shift; + my $fixforlocaldir = shift; + + my ( $filepart, $dirpart ) = ( $filename, "." ); + ( $filepart, $dirpart ) = ( $2, $1 ) if ( $filename =~ /(.*)\/(.*)/ ); + $dirpart .= "/"; + + if ( $fixforlocaldir ) + { + $dirpart =~ s/^$state->{prependdir}//; + } + + return ( $filepart, $dirpart ); +} + +sub filecleanup +{ + my $filename = shift; + + return undef unless(defined($filename)); + if ( $filename =~ /^\// ) + { + print "E absolute filenames '$filename' not supported by server\n"; + return undef; + } + + $filename =~ s/^\.\///g; + $filename = $state->{prependdir} . $filename; + return $filename; +} + +sub validateGitDir +{ + if( !defined($state->{CVSROOT}) ) + { + print "error 1 CVSROOT not specified\n"; + cleanupWorkTree(); + exit; + } + if( $ENV{GIT_DIR} ne ($state->{CVSROOT} . '/') ) + { + print "error 1 Internally inconsistent CVSROOT\n"; + cleanupWorkTree(); + exit; + } +} + +# Setup working directory in a work tree with the requested version +# loaded in the index. +sub setupWorkTree +{ + my ($ver) = @_; + + validateGitDir(); + + if( ( defined($work->{state}) && $work->{state} != 1 ) || + defined($work->{tmpDir}) ) + { + $log->warn("Bad work tree state management"); + print "error 1 Internal setup multiple work trees without cleanup\n"; + cleanupWorkTree(); + exit; + } + + $work->{workDir} = tempdir ( DIR => $TEMP_DIR ); + + if( !defined($work->{index}) ) + { + (undef, $work->{index}) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 ); + } + + chdir $work->{workDir} or + die "Unable to chdir to $work->{workDir}\n"; + + $log->info("Setting up GIT_WORK_TREE as '.' in '$work->{workDir}', index file is '$work->{index}'"); + + $ENV{GIT_WORK_TREE} = "."; + $ENV{GIT_INDEX_FILE} = $work->{index}; + $work->{state} = 2; + + if($ver) + { + system("git","read-tree",$ver); + unless ($? 
== 0) + { + $log->warn("Error running git-read-tree"); + die "Error running git-read-tree $ver in $work->{workDir} $!\n"; + } + } + # else # req_annotate reads tree for each file +} + +# Ensure current directory is in some kind of working directory, +# with a recent version loaded in the index. +sub ensureWorkTree +{ + if( defined($work->{tmpDir}) ) + { + $log->warn("Bad work tree state management [ensureWorkTree()]"); + print "error 1 Internal setup multiple dirs without cleanup\n"; + cleanupWorkTree(); + exit; + } + if( $work->{state} ) + { + return; + } + + validateGitDir(); + + if( !defined($work->{emptyDir}) ) + { + $work->{emptyDir} = tempdir ( DIR => $TEMP_DIR, OPEN => 0); + } + chdir $work->{emptyDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + + my $ver = `git show-ref -s refs/heads/$state->{module}`; + chomp $ver; + if ($ver !~ /^[0-9a-f]{40}$/) + { + $log->warn("Error from git show-ref -s refs/head$state->{module}"); + print "error 1 cannot find the current HEAD of module"; + cleanupWorkTree(); + exit; + } + + if( !defined($work->{index}) ) + { + (undef, $work->{index}) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 ); + } + + $ENV{GIT_WORK_TREE} = "."; + $ENV{GIT_INDEX_FILE} = $work->{index}; + $work->{state} = 1; + + system("git","read-tree",$ver); + unless ($? == 0) + { + die "Error running git-read-tree $ver $!\n"; + } +} + +# Cleanup working directory that is not needed any longer. +sub cleanupWorkTree +{ + if( ! $work->{state} ) + { + return; + } + + chdir "/" or die "Unable to chdir '/'\n"; + + if( defined($work->{workDir}) ) + { + rmtree( $work->{workDir} ); + undef $work->{workDir}; + } + undef $work->{state}; +} + +# Setup a temporary directory (not a working tree), typically for +# merging dirty state as in req_update. +sub setupTmpDir +{ + $work->{tmpDir} = tempdir ( DIR => $TEMP_DIR ); + chdir $work->{tmpDir} or die "Unable to chdir $work->{tmpDir}\n"; + + return $work->{tmpDir}; +} + +# Clean up a previously setupTmpDir. Restore previous work tree if +# appropriate. +sub cleanupTmpDir +{ + if ( !defined($work->{tmpDir}) ) + { + $log->warn("cleanup tmpdir that has not been setup"); + die "Cleanup tmpDir that has not been setup\n"; + } + if( defined($work->{state}) ) + { + if( $work->{state} == 1 ) + { + chdir $work->{emptyDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + } + elsif( $work->{state} == 2 ) + { + chdir $work->{workDir} or + die "Unable to chdir to $work->{emptyDir}\n"; + } + else + { + $log->warn("Inconsistent work dir state"); + die "Inconsistent work dir state\n"; + } + } + else + { + chdir "/" or die "Unable to chdir '/'\n"; + } +} + +# Given a path, this function returns a string containing the kopts +# that should go into that path's Entries line. For example, a binary +# file should get -kb. 
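+#
+# For example (hypothetical paths, assuming gitcvs.usecrlfattr is enabled): a
+# path whose .gitattributes entry marks it "-text" comes back from check_attr
+# as "unset" and therefore yields "-kb", while a path marked "text" yields ""
+# so no keyword option is written into the Entries line:
+#   my $kopts = kopts_from_path("img/logo.png", "sha1", $filehash);  # likely "-kb"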
+sub kopts_from_path +{ + my ($path, $srcType, $name) = @_; + + if ( defined ( $cfg->{gitcvs}{usecrlfattr} ) and + $cfg->{gitcvs}{usecrlfattr} =~ /\s*(1|true|yes)\s*$/i ) + { + my ($val) = check_attr( "text", $path ); + if ( $val eq "unspecified" ) + { + $val = check_attr( "crlf", $path ); + } + if ( $val eq "unset" ) + { + return "-kb" + } + elsif ( check_attr( "eol", $path ) ne "unspecified" || + $val eq "set" || $val eq "input" ) + { + return ""; + } + else + { + $log->info("Unrecognized check_attr crlf $path : $val"); + } + } + + if ( defined ( $cfg->{gitcvs}{allbinary} ) ) + { + if( ($cfg->{gitcvs}{allbinary} =~ /^\s*(1|true|yes)\s*$/i) ) + { + return "-kb"; + } + elsif( ($cfg->{gitcvs}{allbinary} =~ /^\s*guess\s*$/i) ) + { + if( $srcType eq "sha1Or-k" && + !defined($name) ) + { + my ($ret)=$state->{entries}{$path}{options}; + if( !defined($ret) ) + { + $ret=$state->{opt}{k}; + if(defined($ret)) + { + $ret="-k$ret"; + } + else + { + $ret=""; + } + } + if( ! ($ret=~/^(|-kb|-kkv|-kkvl|-kk|-ko|-kv)$/) ) + { + print "E Bad -k option\n"; + $log->warn("Bad -k option: $ret"); + die "Error: Bad -k option: $ret\n"; + } + + return $ret; + } + else + { + if( is_binary($srcType,$name) ) + { + $log->debug("... as binary"); + return "-kb"; + } + else + { + $log->debug("... as text"); + } + } + } + } + # Return "" to give no special treatment to any path + return ""; +} + +sub check_attr +{ + my ($attr,$path) = @_; + ensureWorkTree(); + if ( open my $fh, '-|', "git", "check-attr", $attr, "--", $path ) + { + my $val = <$fh>; + close $fh; + $val =~ s/.*: ([^:\r\n]*)\s*$/$1/; + return $val; + } + else + { + return undef; + } +} + +# This should have the same heuristics as convert.c:is_binary() and related. +# Note that the bare CR test is done by callers in convert.c. +sub is_binary +{ + my ($srcType,$name) = @_; + $log->debug("is_binary($srcType,$name)"); + + # Minimize amount of interpreted code run in the inner per-character + # loop for large files, by totalling each character value and + # then analyzing the totals. + my @counts; + my $i; + for($i=0;$i<256;$i++) + { + $counts[$i]=0; + } + + my $fh = open_blob_or_die($srcType,$name); + my $line; + while( defined($line=<$fh>) ) + { + # Any '\0' and bare CR are considered binary. + if( $line =~ /\0|(\r[^\n])/ ) + { + close($fh); + return 1; + } + + # Count up each character in the line: + my $len=length($line); + for($i=0;$i<$len;$i++) + { + $counts[ord(substr($line,$i,1))]++; + } + } + close $fh; + + # Don't count CR and LF as either printable/nonprintable + $counts[ord("\n")]=0; + $counts[ord("\r")]=0; + + # Categorize individual character count into printable and nonprintable: + my $printable=0; + my $nonprintable=0; + for($i=0;$i<256;$i++) + { + if( $i < 32 && + $i != ord("\b") && + $i != ord("\t") && + $i != 033 && # ESC + $i != 014 ) # FF + { + $nonprintable+=$counts[$i]; + } + elsif( $i==127 ) # DEL + { + $nonprintable+=$counts[$i]; + } + else + { + $printable+=$counts[$i]; + } + } + + return ($printable >> 7) < $nonprintable; +} + +# Returns open file handle. 
Possible invocations: +# - open_blob_or_die("file",$filename); +# - open_blob_or_die("sha1",$filehash); +sub open_blob_or_die +{ + my ($srcType,$name) = @_; + my ($fh); + if( $srcType eq "file" ) + { + if( !open $fh,"<",$name ) + { + $log->warn("Unable to open file $name: $!"); + die "Unable to open file $name: $!\n"; + } + } + elsif( $srcType eq "sha1" || $srcType eq "sha1Or-k" ) + { + unless ( defined ( $name ) and $name =~ /^[a-zA-Z0-9]{40}$/ ) + { + $log->warn("Need filehash"); + die "Need filehash\n"; + } + + my $type = `git cat-file -t $name`; + chomp $type; + + unless ( defined ( $type ) and $type eq "blob" ) + { + $log->warn("Invalid type '$type' for '$name'"); + die ( "Invalid type '$type' (expected 'blob')" ) + } + + my $size = `git cat-file -s $name`; + chomp $size; + + $log->debug("open_blob_or_die($name) size=$size, type=$type"); + + unless( open $fh, '-|', "git", "cat-file", "blob", $name ) + { + $log->warn("Unable to open sha1 $name"); + die "Unable to open sha1 $name\n"; + } + } + else + { + $log->warn("Unknown type of blob source: $srcType"); + die "Unknown type of blob source: $srcType\n"; + } + return $fh; +} + +# Generate a CVS author name from Git author information, by taking the local +# part of the email address and replacing characters not in the Portable +# Filename Character Set (see IEEE Std 1003.1-2001, 3.276) by underscores. CVS +# Login names are Unix login names, which should be restricted to this +# character set. +sub cvs_author +{ + my $author_line = shift; + (my $author) = $author_line =~ /<([^@>]*)/; + + $author =~ s/[^-a-zA-Z0-9_.]/_/g; + $author =~ s/^-/_/; + + $author; +} + + +sub descramble +{ + # This table is from src/scramble.c in the CVS source + my @SHIFTS = ( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 114,120, 53, 79, 96,109, 72,108, 70, 64, 76, 67,116, 74, 68, 87, + 111, 52, 75,119, 49, 34, 82, 81, 95, 65,112, 86,118,110,122,105, + 41, 57, 83, 43, 46,102, 40, 89, 38,103, 45, 50, 42,123, 91, 35, + 125, 55, 54, 66,124,126, 59, 47, 92, 71,115, 78, 88,107,106, 56, + 36,121,117,104,101,100, 69, 73, 99, 63, 94, 93, 39, 37, 61, 48, + 58,113, 32, 90, 44, 98, 60, 51, 33, 97, 62, 77, 84, 80, 85,223, + 225,216,187,166,229,189,222,188,141,249,148,200,184,136,248,190, + 199,170,181,204,138,232,218,183,255,234,220,247,213,203,226,193, + 174,172,228,252,217,201,131,230,197,211,145,238,161,179,160,212, + 207,221,254,173,202,146,224,151,140,196,205,130,135,133,143,246, + 192,159,244,239,185,168,215,144,139,165,180,157,147,186,214,176, + 227,231,219,169,175,156,206,198,129,164,150,210,154,177,134,127, + 182,128,158,208,162,132,167,209,149,241,153,251,237,236,171,195, + 243,233,253,240,194,250,191,155,142,137,245,235,163,242,178,152 + ); + my ($str) = @_; + + # This should never happen, the same password format (A) has been + # used by CVS since the beginning of time + { + my $fmt = substr($str, 0, 1); + die "invalid password format `$fmt'" unless $fmt eq 'A'; + } + + my @str = unpack "C*", substr($str, 1); + my $ret = join '', map { chr $SHIFTS[$_] } @str; + return $ret; +} + + +package GITCVS::log; + +#### +#### Copyright The Open University UK - 2006. 
+#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### + +use strict; +use warnings; + +=head1 NAME + +GITCVS::log + +=head1 DESCRIPTION + +This module provides very crude logging with a similar interface to +Log::Log4perl + +=head1 METHODS + +=cut + +=head2 new + +Creates a new log object, optionally you can specify a filename here to +indicate the file to log to. If no log file is specified, you can specify one +later with method setfile, or indicate you no longer want logging with method +nofile. + +Until one of these methods is called, all log calls will buffer messages ready +to write out. + +=cut +sub new +{ + my $class = shift; + my $filename = shift; + + my $self = {}; + + bless $self, $class; + + if ( defined ( $filename ) ) + { + open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!"); + } + + return $self; +} + +=head2 setfile + +This methods takes a filename, and attempts to open that file as the log file. +If successful, all buffered data is written out to the file, and any further +logging is written directly to the file. + +=cut +sub setfile +{ + my $self = shift; + my $filename = shift; + + if ( defined ( $filename ) ) + { + open $self->{fh}, ">>", $filename or die("Couldn't open '$filename' for writing : $!"); + } + + return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" ); + + while ( my $line = shift @{$self->{buffer}} ) + { + print {$self->{fh}} $line; + } +} + +=head2 nofile + +This method indicates no logging is going to be used. It flushes any entries in +the internal buffer, and sets a flag to ensure no further data is put there. + +=cut +sub nofile +{ + my $self = shift; + + $self->{nolog} = 1; + + return unless ( defined ( $self->{buffer} ) and ref $self->{buffer} eq "ARRAY" ); + + $self->{buffer} = []; +} + +=head2 _logopen + +Internal method. Returns true if the log file is open, false otherwise. + +=cut +sub _logopen +{ + my $self = shift; + + return 1 if ( defined ( $self->{fh} ) and ref $self->{fh} eq "GLOB" ); + return 0; +} + +=head2 debug info warn fatal + +These four methods are wrappers to _log. They provide the actual interface for +logging data. + +=cut +sub debug { my $self = shift; $self->_log("debug", @_); } +sub info { my $self = shift; $self->_log("info" , @_); } +sub warn { my $self = shift; $self->_log("warn" , @_); } +sub fatal { my $self = shift; $self->_log("fatal", @_); } + +=head2 _log + +This is an internal method called by the logging functions. It generates a +timestamp and pushes the logged line either to file, or internal buffer. + +=cut +sub _log +{ + my $self = shift; + my $level = shift; + + return if ( $self->{nolog} ); + + my @time = localtime; + my $timestring = sprintf("%4d-%02d-%02d %02d:%02d:%02d : %-5s", + $time[5] + 1900, + $time[4] + 1, + $time[3], + $time[2], + $time[1], + $time[0], + uc $level, + ); + + if ( $self->_logopen ) + { + print {$self->{fh}} $timestring . " - " . join(" ",@_) . "\n"; + } else { + push @{$self->{buffer}}, $timestring . " - " . join(" ",@_) . "\n"; + } +} + +=head2 DESTROY + +This method simply closes the file handle if one is open + +=cut +sub DESTROY +{ + my $self = shift; + + if ( $self->_logopen ) + { + close $self->{fh}; + } +} + +package GITCVS::updater; + +#### +#### Copyright The Open University UK - 2006. 
+#### +#### Authors: Martyn Smith +#### Martin Langhoff +#### +#### + +use strict; +use warnings; +use DBI; + +=head1 METHODS + +=cut + +=head2 new + +=cut +sub new +{ + my $class = shift; + my $config = shift; + my $module = shift; + my $log = shift; + + die "Need to specify a git repository" unless ( defined($config) and -d $config ); + die "Need to specify a module" unless ( defined($module) ); + + $class = ref($class) || $class; + + my $self = {}; + + bless $self, $class; + + $self->{valid_tables} = {'revision' => 1, + 'revision_ix1' => 1, + 'revision_ix2' => 1, + 'head' => 1, + 'head_ix1' => 1, + 'properties' => 1, + 'commitmsgs' => 1}; + + $self->{module} = $module; + $self->{git_path} = $config . "/"; + + $self->{log} = $log; + + die "Git repo '$self->{git_path}' doesn't exist" unless ( -d $self->{git_path} ); + + $self->{dbdriver} = $cfg->{gitcvs}{$state->{method}}{dbdriver} || + $cfg->{gitcvs}{dbdriver} || "SQLite"; + $self->{dbname} = $cfg->{gitcvs}{$state->{method}}{dbname} || + $cfg->{gitcvs}{dbname} || "%Ggitcvs.%m.sqlite"; + $self->{dbuser} = $cfg->{gitcvs}{$state->{method}}{dbuser} || + $cfg->{gitcvs}{dbuser} || ""; + $self->{dbpass} = $cfg->{gitcvs}{$state->{method}}{dbpass} || + $cfg->{gitcvs}{dbpass} || ""; + $self->{dbtablenameprefix} = $cfg->{gitcvs}{$state->{method}}{dbtablenameprefix} || + $cfg->{gitcvs}{dbtablenameprefix} || ""; + my %mapping = ( m => $module, + a => $state->{method}, + u => getlogin || getpwuid($<) || $<, + G => $self->{git_path}, + g => mangle_dirname($self->{git_path}), + ); + $self->{dbname} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbuser} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbtablenameprefix} =~ s/%([mauGg])/$mapping{$1}/eg; + $self->{dbtablenameprefix} = mangle_tablename($self->{dbtablenameprefix}); + + die "Invalid char ':' in dbdriver" if $self->{dbdriver} =~ /:/; + die "Invalid char ';' in dbname" if $self->{dbname} =~ /;/; + $self->{dbh} = DBI->connect("dbi:$self->{dbdriver}:dbname=$self->{dbname}", + $self->{dbuser}, + $self->{dbpass}); + die "Error connecting to database\n" unless defined $self->{dbh}; + + $self->{tables} = {}; + foreach my $table ( keys %{$self->{dbh}->table_info(undef,undef,undef,'TABLE')->fetchall_hashref('TABLE_NAME')} ) + { + $self->{tables}{$table} = 1; + } + + # Construct the revision table if required + unless ( $self->{tables}{$self->tablename("revision")} ) + { + my $tablename = $self->tablename("revision"); + my $ix1name = $self->tablename("revision_ix1"); + my $ix2name = $self->tablename("revision_ix2"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + name TEXT NOT NULL, + revision INTEGER NOT NULL, + filehash TEXT NOT NULL, + commithash TEXT NOT NULL, + author TEXT NOT NULL, + modified TEXT NOT NULL, + mode TEXT NOT NULL + ) + "); + $self->{dbh}->do(" + CREATE INDEX $ix1name + ON $tablename (name,revision) + "); + $self->{dbh}->do(" + CREATE INDEX $ix2name + ON $tablename (name,commithash) + "); + } + + # Construct the head table if required + unless ( $self->{tables}{$self->tablename("head")} ) + { + my $tablename = $self->tablename("head"); + my $ix1name = $self->tablename("head_ix1"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + name TEXT NOT NULL, + revision INTEGER NOT NULL, + filehash TEXT NOT NULL, + commithash TEXT NOT NULL, + author TEXT NOT NULL, + modified TEXT NOT NULL, + mode TEXT NOT NULL + ) + "); + $self->{dbh}->do(" + CREATE INDEX $ix1name + ON $tablename (name) + "); + } + + # Construct the properties table if required + unless ( 
$self->{tables}{$self->tablename("properties")} ) + { + my $tablename = $self->tablename("properties"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + key TEXT NOT NULL PRIMARY KEY, + value TEXT + ) + "); + } + + # Construct the commitmsgs table if required + unless ( $self->{tables}{$self->tablename("commitmsgs")} ) + { + my $tablename = $self->tablename("commitmsgs"); + $self->{dbh}->do(" + CREATE TABLE $tablename ( + key TEXT NOT NULL PRIMARY KEY, + value TEXT + ) + "); + } + + return $self; +} + +=head2 tablename + +=cut +sub tablename +{ + my $self = shift; + my $name = shift; + + if (exists $self->{valid_tables}{$name}) { + return $self->{dbtablenameprefix} . $name; + } else { + return undef; + } +} + +=head2 update + +=cut +sub update +{ + my $self = shift; + + # first lets get the commit list + $ENV{GIT_DIR} = $self->{git_path}; + + my $commitsha1 = `git rev-parse $self->{module}`; + chomp $commitsha1; + + my $commitinfo = `git cat-file commit $self->{module} 2>&1`; + unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ ) + { + die("Invalid module '$self->{module}'"); + } + + + my $git_log; + my $lastcommit = $self->_get_prop("last_commit"); + + if (defined $lastcommit && $lastcommit eq $commitsha1) { # up-to-date + return 1; + } + + # Start exclusive lock here... + $self->{dbh}->begin_work() or die "Cannot lock database for BEGIN"; + + # TODO: log processing is memory bound + # if we can parse into a 2nd file that is in reverse order + # we can probably do something really efficient + my @git_log_params = ('--pretty', '--parents', '--topo-order'); + + if (defined $lastcommit) { + push @git_log_params, "$lastcommit..$self->{module}"; + } else { + push @git_log_params, $self->{module}; + } + # git-rev-list is the backend / plumbing version of git-log + open(GITLOG, '-|', 'git', 'rev-list', @git_log_params) or die "Cannot call git-rev-list: $!"; + + my @commits; + + my %commit = (); + + while ( ) + { + chomp; + if (m/^commit\s+(.*)$/) { + # on ^commit lines put the just seen commit in the stack + # and prime things for the next one + if (keys %commit) { + my %copy = %commit; + unshift @commits, \%copy; + %commit = (); + } + my @parents = split(m/\s+/, $1); + $commit{hash} = shift @parents; + $commit{parents} = \@parents; + } elsif (m/^(\w+?):\s+(.*)$/ && !exists($commit{message})) { + # on rfc822-like lines seen before we see any message, + # lowercase the entry and put it in the hash as key-value + $commit{lc($1)} = $2; + } else { + # message lines - skip initial empty line + # and trim whitespace + if (!exists($commit{message}) && m/^\s*$/) { + # define it to mark the end of headers + $commit{message} = ''; + next; + } + s/^\s+//; s/\s+$//; # trim ws + $commit{message} .= $_ . "\n"; + } + } + close GITLOG; + + unshift @commits, \%commit if ( keys %commit ); + + # Now all the commits are in the @commits bucket + # ordered by time DESC. 
for each commit that needs processing, + # determine whether it's following the last head we've seen or if + # it's on its own branch, grab a file list, and add whatever's changed + # NOTE: $lastcommit refers to the last commit from previous run + # $lastpicked is the last commit we picked in this run + my $lastpicked; + my $head = {}; + if (defined $lastcommit) { + $lastpicked = $lastcommit; + } + + my $committotal = scalar(@commits); + my $commitcount = 0; + + # Load the head table into $head (for cached lookups during the update process) + foreach my $file ( @{$self->gethead()} ) + { + $head->{$file->{name}} = $file; + } + + foreach my $commit ( @commits ) + { + $self->{log}->debug("GITCVS::updater - Processing commit $commit->{hash} (" . (++$commitcount) . " of $committotal)"); + if (defined $lastpicked) + { + if (!in_array($lastpicked, @{$commit->{parents}})) + { + # skip, we'll see this delta + # as part of a merge later + # warn "skipping off-track $commit->{hash}\n"; + next; + } elsif (@{$commit->{parents}} > 1) { + # it is a merge commit, for each parent that is + # not $lastpicked, see if we can get a log + # from the merge-base to that parent to put it + # in the message as a merge summary. + my @parents = @{$commit->{parents}}; + foreach my $parent (@parents) { + # git-merge-base can potentially (but rarely) throw + # several candidate merge bases. let's assume + # that the first one is the best one. + if ($parent eq $lastpicked) { + next; + } + my $base = eval { + safe_pipe_capture('git', 'merge-base', + $lastpicked, $parent); + }; + # The two branches may not be related at all, + # in which case merge base simply fails to find + # any, but that's Ok. + next if ($@); + + chomp $base; + if ($base) { + my @merged; + # print "want to log between $base $parent \n"; + open(GITLOG, '-|', 'git', 'log', '--pretty=medium', "$base..$parent") + or die "Cannot call git-log: $!"; + my $mergedhash; + while () { + chomp; + if (!defined $mergedhash) { + if (m/^commit\s+(.+)$/) { + $mergedhash = $1; + } else { + next; + } + } else { + # grab the first line that looks non-rfc822 + # aka has content after leading space + if (m/^\s+(\S.*)$/) { + my $title = $1; + $title = substr($title,0,100); # truncate + unshift @merged, "$mergedhash $title"; + undef $mergedhash; + } + } + } + close GITLOG; + if (@merged) { + $commit->{mergemsg} = $commit->{message}; + $commit->{mergemsg} .= "\nSummary of merged commits:\n\n"; + foreach my $summary (@merged) { + $commit->{mergemsg} .= "\t$summary\n"; + } + $commit->{mergemsg} .= "\n\n"; + # print "Message for $commit->{hash} \n$commit->{mergemsg}"; + } + } + } + } + } + + # convert the date to CVS-happy format + $commit->{date} = "$2 $1 $4 $3 $5" if ( $commit->{date} =~ /^\w+\s+(\w+)\s+(\d+)\s+(\d+:\d+:\d+)\s+(\d+)\s+([+-]\d+)$/ ); + + if ( defined ( $lastpicked ) ) + { + my $filepipe = open(FILELIST, '-|', 'git', 'diff-tree', '-z', '-r', $lastpicked, $commit->{hash}) or die("Cannot call git-diff-tree : $!"); + local ($/) = "\0"; + while ( ) + { + chomp; + unless ( /^:\d{6}\s+\d{3}(\d)\d{2}\s+[a-zA-Z0-9]{40}\s+([a-zA-Z0-9]{40})\s+(\w)$/o ) + { + die("Couldn't process git-diff-tree line : $_"); + } + my ($mode, $hash, $change) = ($1, $2, $3); + my $name = ; + chomp($name); + + # $log->debug("File mode=$mode, hash=$hash, change=$change, name=$name"); + + my $git_perms = ""; + $git_perms .= "r" if ( $mode & 4 ); + $git_perms .= "w" if ( $mode & 2 ); + $git_perms .= "x" if ( $mode & 1 ); + $git_perms = "rw" if ( $git_perms eq "" ); + + if ( $change eq "D" ) + { + 
#$log->debug("DELETE $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} + 1, + filehash => "deleted", + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + elsif ( $change eq "M" || $change eq "T" ) + { + #$log->debug("MODIFIED $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} + 1, + filehash => $hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + elsif ( $change eq "A" ) + { + #$log->debug("ADDED $name"); + $head->{$name} = { + name => $name, + revision => $head->{$name}{revision} ? $head->{$name}{revision}+1 : 1, + filehash => $hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + $self->insert_rev($name, $head->{$name}{revision}, $hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + else + { + $log->warn("UNKNOWN FILE CHANGE mode=$mode, hash=$hash, change=$change, name=$name"); + die; + } + } + close FILELIST; + } else { + # this is used to detect files removed from the repo + my $seen_files = {}; + + my $filepipe = open(FILELIST, '-|', 'git', 'ls-tree', '-z', '-r', $commit->{hash}) or die("Cannot call git-ls-tree : $!"); + local $/ = "\0"; + while ( ) + { + chomp; + unless ( /^(\d+)\s+(\w+)\s+([a-zA-Z0-9]+)\t(.*)$/o ) + { + die("Couldn't process git-ls-tree line : $_"); + } + + my ( $git_perms, $git_type, $git_hash, $git_filename ) = ( $1, $2, $3, $4 ); + + $seen_files->{$git_filename} = 1; + + my ( $oldhash, $oldrevision, $oldmode ) = ( + $head->{$git_filename}{filehash}, + $head->{$git_filename}{revision}, + $head->{$git_filename}{mode} + ); + + if ( $git_perms =~ /^\d\d\d(\d)\d\d/o ) + { + $git_perms = ""; + $git_perms .= "r" if ( $1 & 4 ); + $git_perms .= "w" if ( $1 & 2 ); + $git_perms .= "x" if ( $1 & 1 ); + } else { + $git_perms = "rw"; + } + + # unless the file exists with the same hash, we need to update it ... 
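For reference, the two pipelines parsed in update() emit the standard plumbing record shapes sketched below; with -z, git diff-tree puts each pathname in its own NUL-terminated field (hence the separate read for the name), while git ls-tree keeps mode, type, object and TAB-separated path in one record:

    $ git diff-tree -z -r <parent> <commit>   # :100644 100644 <oldsha> <newsha> M, then <path>
    $ git ls-tree -z -r <commit>              # 100644 blob <sha>    <path>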
+ unless ( defined($oldhash) and $oldhash eq $git_hash and defined($oldmode) and $oldmode eq $git_perms ) + { + my $newrevision = ( $oldrevision or 0 ) + 1; + + $head->{$git_filename} = { + name => $git_filename, + revision => $newrevision, + filehash => $git_hash, + commithash => $commit->{hash}, + modified => $commit->{date}, + author => $commit->{author}, + mode => $git_perms, + }; + + + $self->insert_rev($git_filename, $newrevision, $git_hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms); + } + } + close FILELIST; + + # Detect deleted files + foreach my $file ( keys %$head ) + { + unless ( exists $seen_files->{$file} or $head->{$file}{filehash} eq "deleted" ) + { + $head->{$file}{revision}++; + $head->{$file}{filehash} = "deleted"; + $head->{$file}{commithash} = $commit->{hash}; + $head->{$file}{modified} = $commit->{date}; + $head->{$file}{author} = $commit->{author}; + + $self->insert_rev($file, $head->{$file}{revision}, $head->{$file}{filehash}, $commit->{hash}, $commit->{date}, $commit->{author}, $head->{$file}{mode}); + } + } + # END : "Detect deleted files" + } + + + if (exists $commit->{mergemsg}) + { + $self->insert_mergelog($commit->{hash}, $commit->{mergemsg}); + } + + $lastpicked = $commit->{hash}; + + $self->_set_prop("last_commit", $commit->{hash}); + } + + $self->delete_head(); + foreach my $file ( keys %$head ) + { + $self->insert_head( + $file, + $head->{$file}{revision}, + $head->{$file}{filehash}, + $head->{$file}{commithash}, + $head->{$file}{modified}, + $head->{$file}{author}, + $head->{$file}{mode}, + ); + } + # invalidate the gethead cache + $self->{gethead_cache} = undef; + + + # Ending exclusive lock here + $self->{dbh}->commit() or die "Failed to commit changes to SQLite"; +} + +sub insert_rev +{ + my $self = shift; + my $name = shift; + my $revision = shift; + my $filehash = shift; + my $commithash = shift; + my $modified = shift; + my $author = shift; + my $mode = shift; + my $tablename = $self->tablename("revision"); + + my $insert_rev = $self->{dbh}->prepare_cached("INSERT INTO $tablename (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1); + $insert_rev->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode); +} + +sub insert_mergelog +{ + my $self = shift; + my $key = shift; + my $value = shift; + my $tablename = $self->tablename("commitmsgs"); + + my $insert_mergelog = $self->{dbh}->prepare_cached("INSERT INTO $tablename (key, value) VALUES (?,?)",{},1); + $insert_mergelog->execute($key, $value); +} + +sub delete_head +{ + my $self = shift; + my $tablename = $self->tablename("head"); + + my $delete_head = $self->{dbh}->prepare_cached("DELETE FROM $tablename",{},1); + $delete_head->execute(); +} + +sub insert_head +{ + my $self = shift; + my $name = shift; + my $revision = shift; + my $filehash = shift; + my $commithash = shift; + my $modified = shift; + my $author = shift; + my $mode = shift; + my $tablename = $self->tablename("head"); + + my $insert_head = $self->{dbh}->prepare_cached("INSERT INTO $tablename (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1); + $insert_head->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode); +} + +sub _headrev +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("head"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT filehash, revision, mode FROM $tablename WHERE name=?",{},1); + $db_query->execute($filename); + my ( $hash, 
$revision, $mode ) = $db_query->fetchrow_array; + + return ( $hash, $revision, $mode ); +} + +sub _get_prop +{ + my $self = shift; + my $key = shift; + my $tablename = $self->tablename("properties"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT value FROM $tablename WHERE key=?",{},1); + $db_query->execute($key); + my ( $value ) = $db_query->fetchrow_array; + + return $value; +} + +sub _set_prop +{ + my $self = shift; + my $key = shift; + my $value = shift; + my $tablename = $self->tablename("properties"); + + my $db_query = $self->{dbh}->prepare_cached("UPDATE $tablename SET value=? WHERE key=?",{},1); + $db_query->execute($value, $key); + + unless ( $db_query->rows ) + { + $db_query = $self->{dbh}->prepare_cached("INSERT INTO $tablename (key, value) VALUES (?,?)",{},1); + $db_query->execute($key, $value); + } + + return $value; +} + +=head2 gethead + +=cut + +sub gethead +{ + my $self = shift; + my $tablename = $self->tablename("head"); + + return $self->{gethead_cache} if ( defined ( $self->{gethead_cache} ) ); + + my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, mode, revision, modified, commithash, author FROM $tablename ORDER BY name ASC",{},1); + $db_query->execute(); + + my $tree = []; + while ( my $file = $db_query->fetchrow_hashref ) + { + push @$tree, $file; + } + + $self->{gethead_cache} = $tree; + + return $tree; +} + +=head2 getlog + +=cut + +sub getlog +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query = $self->{dbh}->prepare_cached("SELECT name, filehash, author, mode, revision, modified, commithash FROM $tablename WHERE name=? ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + my $tree = []; + while ( my $file = $db_query->fetchrow_hashref ) + { + push @$tree, $file; + } + + return $tree; +} + +=head2 getmeta + +This function takes a filename (with path) argument and returns a hashref of +metadata for that file. + +=cut + +sub getmeta +{ + my $self = shift; + my $filename = shift; + my $revision = shift; + my $tablename_rev = $self->tablename("revision"); + my $tablename_head = $self->tablename("head"); + + my $db_query; + if ( defined($revision) and $revision =~ /^\d+$/ ) + { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_rev WHERE name=? AND revision=?",{},1); + $db_query->execute($filename, $revision); + } + elsif ( defined($revision) and $revision =~ /^[a-zA-Z0-9]{40}$/ ) + { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_rev WHERE name=? 
AND commithash=?",{},1); + $db_query->execute($filename, $revision); + } else { + $db_query = $self->{dbh}->prepare_cached("SELECT * FROM $tablename_head WHERE name=?",{},1); + $db_query->execute($filename); + } + + return $db_query->fetchrow_hashref; +} + +=head2 commitmessage + +this function takes a commithash and returns the commit message for that commit + +=cut +sub commitmessage +{ + my $self = shift; + my $commithash = shift; + my $tablename = $self->tablename("commitmsgs"); + + die("Need commithash") unless ( defined($commithash) and $commithash =~ /^[a-zA-Z0-9]{40}$/ ); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT value FROM $tablename WHERE key=?",{},1); + $db_query->execute($commithash); + + my ( $message ) = $db_query->fetchrow_array; + + if ( defined ( $message ) ) + { + $message .= " " if ( $message =~ /\n$/ ); + return $message; + } + + my @lines = safe_pipe_capture("git", "cat-file", "commit", $commithash); + shift @lines while ( $lines[0] =~ /\S/ ); + $message = join("",@lines); + $message .= " " if ( $message =~ /\n$/ ); + return $message; +} + +=head2 gethistory + +This function takes a filename (with path) argument and returns an arrayofarrays +containing revision,filehash,commithash ordered by revision descending + +=cut +sub gethistory +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM $tablename WHERE name=? ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + return $db_query->fetchall_arrayref; +} + +=head2 gethistorydense + +This function takes a filename (with path) argument and returns an arrayofarrays +containing revision,filehash,commithash ordered by revision descending. + +This version of gethistory skips deleted entries -- so it is useful for annotate. +The 'dense' part is a reference to a '--dense' option available for git-rev-list +and other git tools that depend on it. + +=cut +sub gethistorydense +{ + my $self = shift; + my $filename = shift; + my $tablename = $self->tablename("revision"); + + my $db_query; + $db_query = $self->{dbh}->prepare_cached("SELECT revision, filehash, commithash FROM $tablename WHERE name=? AND filehash!='deleted' ORDER BY revision DESC",{},1); + $db_query->execute($filename); + + return $db_query->fetchall_arrayref; +} + +=head2 in_array() + +from Array::PAT - mimics the in_array() function +found in PHP. Yuck but works for small arrays. + +=cut +sub in_array +{ + my ($check, @array) = @_; + my $retval = 0; + foreach my $test (@array){ + if($check eq $test){ + $retval = 1; + } + } + return $retval; +} + +=head2 safe_pipe_capture + +an alternative to `command` that allows input to be passed as an array +to work around shell problems with weird characters in arguments + +=cut +sub safe_pipe_capture { + + my @output; + + if (my $pid = open my $child, '-|') { + @output = (<$child>); + close $child or die join(' ',@_).": $! $?"; + } else { + exec(@_) or die "$! $?"; # exec() can fail the executable can't be found + } + return wantarray ? 
@output : join('',@output); +} + +=head2 mangle_dirname + +create a string from a directory name that is suitable to use as +part of a filename, mainly by converting all chars except \w.- to _ + +=cut +sub mangle_dirname { + my $dirname = shift; + return unless defined $dirname; + + $dirname =~ s/[^\w.-]/_/g; + + return $dirname; +} + +=head2 mangle_tablename + +create a string from a that is suitable to use as part of an SQL table +name, mainly by converting all chars except \w to _ + +=cut +sub mangle_tablename { + my $tablename = shift; + return unless defined $tablename; + + $tablename =~ s/[^\w_]/_/g; + + return $tablename; +} + +1; diff --git a/SparkleShare/Mac/git/libexec/git-core/git-daemon b/SparkleShare/Mac/git/libexec/git-core/git-daemon new file mode 100755 index 00000000..14974413 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-daemon differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-describe b/SparkleShare/Mac/git/libexec/git-core/git-describe new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-describe @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-diff b/SparkleShare/Mac/git/libexec/git-core/git-diff new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-diff @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-diff-files b/SparkleShare/Mac/git/libexec/git-core/git-diff-files new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-diff-files @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-diff-index b/SparkleShare/Mac/git/libexec/git-core/git-diff-index new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-diff-index @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-diff-tree b/SparkleShare/Mac/git/libexec/git-core/git-diff-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-diff-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-difftool b/SparkleShare/Mac/git/libexec/git-core/git-difftool new file mode 100755 index 00000000..d43a6096 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-difftool @@ -0,0 +1,122 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); +# Copyright (c) 2009, 2010 David Aguilar +# +# This is a wrapper around the GIT_EXTERNAL_DIFF-compatible +# git-difftool--helper script. +# +# This script exports GIT_EXTERNAL_DIFF and GIT_PAGER for use by git. +# GIT_DIFFTOOL_NO_PROMPT, GIT_DIFFTOOL_PROMPT, and GIT_DIFF_TOOL +# are exported for use by git-difftool--helper. +# +# Any arguments that are unknown to this script are forwarded to 'git diff'. 
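A few illustrative invocations of the flag handling below (tool names are whatever you have configured, e.g. vimdiff or meld):

    $ git difftool --tool=vimdiff HEAD~1 HEAD      # exports GIT_DIFF_TOOL=vimdiff
    $ git difftool -y -x "diff -u" -- Makefile     # external command, no prompt
    $ git config diff.guitool meld
    $ git difftool --gui                           # -g falls back to diff.guitool
    $ git config difftool.prompt false             # default consulted by the helper script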
+ +use 5.008; +use strict; +use warnings; +use Cwd qw(abs_path); +use File::Basename qw(dirname); + +require Git; + +my $DIR = abs_path(dirname($0)); + + +sub usage +{ + print << 'USAGE'; +usage: git difftool [-t|--tool=] [-x|--extcmd=] + [-y|--no-prompt] [-g|--gui] + ['git diff' options] +USAGE + exit 1; +} + +sub setup_environment +{ + $ENV{PATH} = "$DIR:$ENV{PATH}"; + $ENV{GIT_PAGER} = ''; + $ENV{GIT_EXTERNAL_DIFF} = 'git-difftool--helper'; +} + +sub exe +{ + my $exe = shift; + if ($^O eq 'MSWin32' || $^O eq 'msys') { + return "$exe.exe"; + } + return $exe; +} + +sub generate_command +{ + my @command = (exe('git'), 'diff'); + my $skip_next = 0; + my $idx = -1; + my $prompt = ''; + for my $arg (@ARGV) { + $idx++; + if ($skip_next) { + $skip_next = 0; + next; + } + if ($arg eq '-t' || $arg eq '--tool') { + usage() if $#ARGV <= $idx; + $ENV{GIT_DIFF_TOOL} = $ARGV[$idx + 1]; + $skip_next = 1; + next; + } + if ($arg =~ /^--tool=/) { + $ENV{GIT_DIFF_TOOL} = substr($arg, 7); + next; + } + if ($arg eq '-x' || $arg eq '--extcmd') { + usage() if $#ARGV <= $idx; + $ENV{GIT_DIFFTOOL_EXTCMD} = $ARGV[$idx + 1]; + $skip_next = 1; + next; + } + if ($arg =~ /^--extcmd=/) { + $ENV{GIT_DIFFTOOL_EXTCMD} = substr($arg, 9); + next; + } + if ($arg eq '-g' || $arg eq '--gui') { + eval { + my $tool = Git::command_oneline('config', + 'diff.guitool'); + if (length($tool)) { + $ENV{GIT_DIFF_TOOL} = $tool; + } + }; + next; + } + if ($arg eq '-y' || $arg eq '--no-prompt') { + $prompt = 'no'; + next; + } + if ($arg eq '--prompt') { + $prompt = 'yes'; + next; + } + if ($arg eq '-h' || $arg eq '--help') { + usage(); + } + push @command, $arg; + } + if ($prompt eq 'yes') { + $ENV{GIT_DIFFTOOL_PROMPT} = 'true'; + } elsif ($prompt eq 'no') { + $ENV{GIT_DIFFTOOL_NO_PROMPT} = 'true'; + } + return @command +} + +setup_environment(); + +# ActiveState Perl for Win32 does not implement POSIX semantics of +# exec* system call. It just spawns the given executable and finishes +# the starting program, exiting with code 0. +# system will at least catch the errors returned by git diff, +# allowing the caller of git difftool better handling of failures. +my $rc = system(generate_command()); +exit($rc | ($rc >> 8)); diff --git a/SparkleShare/Mac/git/libexec/git-core/git-difftool--helper b/SparkleShare/Mac/git/libexec/git-core/git-difftool--helper new file mode 100755 index 00000000..0594bf7c --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-difftool--helper @@ -0,0 +1,72 @@ +#!/bin/sh +# git-difftool--helper is a GIT_EXTERNAL_DIFF-compatible diff tool launcher. +# This script is typically launched by using the 'git difftool' +# convenience command. +# +# Copyright (c) 2009, 2010 David Aguilar + +TOOL_MODE=diff +. git-mergetool--lib + +# difftool.prompt controls the default prompt/no-prompt behavior +# and is overridden with $GIT_DIFFTOOL*_PROMPT. +should_prompt () { + prompt_merge=$(git config --bool mergetool.prompt || echo true) + prompt=$(git config --bool difftool.prompt || echo $prompt_merge) + if test "$prompt" = true; then + test -z "$GIT_DIFFTOOL_NO_PROMPT" + else + test -n "$GIT_DIFFTOOL_PROMPT" + fi +} + +# Indicates that --extcmd=... 
was specified +use_ext_cmd () { + test -n "$GIT_DIFFTOOL_EXTCMD" +} + +launch_merge_tool () { + # Merged is the filename as it appears in the work tree + # Local is the contents of a/filename + # Remote is the contents of b/filename + # Custom merge tool commands might use $BASE so we provide it + MERGED="$1" + LOCAL="$2" + REMOTE="$3" + BASE="$1" + + # $LOCAL and $REMOTE are temporary files so prompt + # the user with the real $MERGED name before launching $merge_tool. + if should_prompt; then + printf "\nViewing: '$MERGED'\n" + if use_ext_cmd; then + printf "Hit return to launch '%s': " \ + "$GIT_DIFFTOOL_EXTCMD" + else + printf "Hit return to launch '%s': " "$merge_tool" + fi + read ans + fi + + if use_ext_cmd; then + export BASE + eval $GIT_DIFFTOOL_EXTCMD '"$LOCAL"' '"$REMOTE"' + else + run_merge_tool "$merge_tool" + fi +} + +if ! use_ext_cmd; then + if test -n "$GIT_DIFF_TOOL"; then + merge_tool="$GIT_DIFF_TOOL" + else + merge_tool="$(get_merge_tool)" || exit + fi +fi + +# Launch the merge tool on each path provided by 'git diff' +while test $# -gt 6 +do + launch_merge_tool "$1" "$2" "$5" + shift 7 +done diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fast-export b/SparkleShare/Mac/git/libexec/git-core/git-fast-export new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fast-export @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fast-import b/SparkleShare/Mac/git/libexec/git-core/git-fast-import new file mode 100755 index 00000000..c869fe01 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-fast-import differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fetch b/SparkleShare/Mac/git/libexec/git-core/git-fetch new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fetch @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fetch-pack b/SparkleShare/Mac/git/libexec/git-core/git-fetch-pack new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fetch-pack @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-filter-branch b/SparkleShare/Mac/git/libexec/git-core/git-filter-branch new file mode 100755 index 00000000..ae602e37 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-filter-branch @@ -0,0 +1,516 @@ +#!/bin/sh +# +# Rewrite revision history +# Copyright (c) Petr Baudis, 2006 +# Minimal changes to "port" it to core-git (c) Johannes Schindelin, 2007 +# +# Lets you rewrite the revision history of the current branch, creating +# a new branch. You can specify a number of filters to modify the commits, +# files and trees. + +# The following functions will also be available in the commit filter: + +functions=$(cat << \EOF +warn () { + echo "$*" >&2 +} + +map() +{ + # if it was not rewritten, take the original + if test -r "$workdir/../map/$1" + then + cat "$workdir/../map/$1" + else + echo "$1" + fi +} + +# if you run 'skip_commit "$@"' in a commit filter, it will print +# the (mapped) parents, effectively skipping the commit. + +skip_commit() +{ + shift; + while [ -n "$1" ]; + do + shift; + map "$1"; + shift; + done; +} + +# if you run 'git_commit_non_empty_tree "$@"' in a commit filter, +# it will skip commits that leave the tree untouched, commit the other. 
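To illustrate the helpers defined in this block (the e-mail address is made up), a commit filter can combine skip_commit with the plain git commit-tree fallback:

    $ git filter-branch --commit-filter '
          if [ "$GIT_AUTHOR_EMAIL" = "old@example.org" ]; then
              skip_commit "$@"        # drop the commit, keep its (mapped) parents
          else
              git commit-tree "$@"
          fi' HEAD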
+git_commit_non_empty_tree() +{ + if test $# = 3 && test "$1" = $(git rev-parse "$3^{tree}"); then + map "$3" + else + git commit-tree "$@" + fi +} +# override die(): this version puts in an extra line break, so that +# the progress is still visible + +die() +{ + echo >&2 + echo "$*" >&2 + exit 1 +} +EOF +) + +eval "$functions" + +# When piped a commit, output a script to set the ident of either +# "author" or "committer + +set_ident () { + lid="$(echo "$1" | tr "[A-Z]" "[a-z]")" + uid="$(echo "$1" | tr "[a-z]" "[A-Z]")" + pick_id_script=' + /^'$lid' /{ + s/'\''/'\''\\'\'\''/g + h + s/^'$lid' \([^<]*\) <[^>]*> .*$/\1/ + s/'\''/'\''\'\'\''/g + s/.*/GIT_'$uid'_NAME='\''&'\''; export GIT_'$uid'_NAME/p + + g + s/^'$lid' [^<]* <\([^>]*\)> .*$/\1/ + s/'\''/'\''\'\'\''/g + s/.*/GIT_'$uid'_EMAIL='\''&'\''; export GIT_'$uid'_EMAIL/p + + g + s/^'$lid' [^<]* <[^>]*> \(.*\)$/\1/ + s/'\''/'\''\'\'\''/g + s/.*/GIT_'$uid'_DATE='\''&'\''; export GIT_'$uid'_DATE/p + + q + } + ' + + LANG=C LC_ALL=C sed -ne "$pick_id_script" + # Ensure non-empty id name. + echo "case \"\$GIT_${uid}_NAME\" in \"\") GIT_${uid}_NAME=\"\${GIT_${uid}_EMAIL%%@*}\" && export GIT_${uid}_NAME;; esac" +} + +USAGE="[--env-filter ] [--tree-filter ] + [--index-filter ] [--parent-filter ] + [--msg-filter ] [--commit-filter ] + [--tag-name-filter ] [--subdirectory-filter ] + [--original ] [-d ] [-f | --force] + [...]" + +OPTIONS_SPEC= +. git-sh-setup + +if [ "$(is_bare_repository)" = false ]; then + git diff-files --ignore-submodules --quiet && + git diff-index --cached --quiet HEAD -- || + die "Cannot rewrite branch(es) with a dirty working directory." +fi + +tempdir=.git-rewrite +filter_env= +filter_tree= +filter_index= +filter_parent= +filter_msg=cat +filter_commit= +filter_tag_name= +filter_subdir= +orig_namespace=refs/original/ +force= +prune_empty= +remap_to_ancestor= +while : +do + case "$1" in + --) + shift + break + ;; + --force|-f) + shift + force=t + continue + ;; + --remap-to-ancestor) + # deprecated ($remap_to_ancestor is set now automatically) + shift + remap_to_ancestor=t + continue + ;; + --prune-empty) + shift + prune_empty=t + continue + ;; + -*) + ;; + *) + break; + esac + + # all switches take one argument + ARG="$1" + case "$#" in 1) usage ;; esac + shift + OPTARG="$1" + shift + + case "$ARG" in + -d) + tempdir="$OPTARG" + ;; + --env-filter) + filter_env="$OPTARG" + ;; + --tree-filter) + filter_tree="$OPTARG" + ;; + --index-filter) + filter_index="$OPTARG" + ;; + --parent-filter) + filter_parent="$OPTARG" + ;; + --msg-filter) + filter_msg="$OPTARG" + ;; + --commit-filter) + filter_commit="$functions; $OPTARG" + ;; + --tag-name-filter) + filter_tag_name="$OPTARG" + ;; + --subdirectory-filter) + filter_subdir="$OPTARG" + remap_to_ancestor=t + ;; + --original) + orig_namespace=$(expr "$OPTARG/" : '\(.*[^/]\)/*$')/ + ;; + *) + usage + ;; + esac +done + +case "$prune_empty,$filter_commit" in +,) + filter_commit='git commit-tree "$@"';; +t,) + filter_commit="$functions;"' git_commit_non_empty_tree "$@"';; +,*) + ;; +*) + die "Cannot set --prune-empty and --commit-filter at the same time" +esac + +case "$force" in +t) + rm -rf "$tempdir" +;; +'') + test -d "$tempdir" && + die "$tempdir already exists, please remove it" +esac +mkdir -p "$tempdir/t" && +tempdir="$(cd "$tempdir"; pwd)" && +cd "$tempdir/t" && +workdir="$(pwd)" || +die "" + +# Remove tempdir on exit +trap 'cd ../..; rm -rf "$tempdir"' 0 + +ORIG_GIT_DIR="$GIT_DIR" +ORIG_GIT_WORK_TREE="$GIT_WORK_TREE" +ORIG_GIT_INDEX_FILE="$GIT_INDEX_FILE" +GIT_WORK_TREE=. 
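Typical invocations of the filters parsed above look like the following (directory, author name and file name are placeholders):

    $ git filter-branch --subdirectory-filter lib -- --all
    $ git filter-branch -f --env-filter 'export GIT_AUTHOR_NAME="A. Hacker"' -- HEAD
    $ git filter-branch --prune-empty --index-filter \
          'git rm --cached --ignore-unmatch secrets.txt' HEAD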
+export GIT_DIR GIT_WORK_TREE + +# Make sure refs/original is empty +git for-each-ref > "$tempdir"/backup-refs || exit +while read sha1 type name +do + case "$force,$name" in + ,$orig_namespace*) + die "Cannot create a new backup. +A previous backup already exists in $orig_namespace +Force overwriting the backup with -f" + ;; + t,$orig_namespace*) + git update-ref -d "$name" $sha1 + ;; + esac +done < "$tempdir"/backup-refs + +# The refs should be updated if their heads were rewritten +git rev-parse --no-flags --revs-only --symbolic-full-name \ + --default HEAD "$@" > "$tempdir"/raw-heads || exit +sed -e '/^^/d' "$tempdir"/raw-heads >"$tempdir"/heads + +test -s "$tempdir"/heads || + die "Which ref do you want to rewrite?" + +GIT_INDEX_FILE="$(pwd)/../index" +export GIT_INDEX_FILE + +# map old->new commit ids for rewriting parents +mkdir ../map || die "Could not create map/ directory" + +# we need "--" only if there are no path arguments in $@ +nonrevs=$(git rev-parse --no-revs "$@") || exit +if test -z "$nonrevs" +then + dashdash=-- +else + dashdash= + remap_to_ancestor=t +fi + +rev_args=$(git rev-parse --revs-only "$@") + +case "$filter_subdir" in +"") + eval set -- "$(git rev-parse --sq --no-revs "$@")" + ;; +*) + eval set -- "$(git rev-parse --sq --no-revs "$@" $dashdash \ + "$filter_subdir")" + ;; +esac + +git rev-list --reverse --topo-order --default HEAD \ + --parents --simplify-merges $rev_args "$@" > ../revs || + die "Could not get the commits" +commits=$(wc -l <../revs | tr -d " ") + +test $commits -eq 0 && die "Found nothing to rewrite" + +# Rewrite the commits + +git_filter_branch__commit_count=0 +while read commit parents; do + git_filter_branch__commit_count=$(($git_filter_branch__commit_count+1)) + printf "\rRewrite $commit ($git_filter_branch__commit_count/$commits)" + + case "$filter_subdir" in + "") + git read-tree -i -m $commit + ;; + *) + # The commit may not have the subdirectory at all + err=$(git read-tree -i -m $commit:"$filter_subdir" 2>&1) || { + if ! 
git rev-parse -q --verify $commit:"$filter_subdir" + then + rm -f "$GIT_INDEX_FILE" + else + echo >&2 "$err" + false + fi + } + esac || die "Could not initialize the index" + + GIT_COMMIT=$commit + export GIT_COMMIT + git cat-file commit "$commit" >../commit || + die "Cannot read commit $commit" + + eval "$(set_ident AUTHOR <../commit)" || + die "setting author failed for commit $commit" + eval "$(set_ident COMMITTER <../commit)" || + die "setting committer failed for commit $commit" + eval "$filter_env" < /dev/null || + die "env filter failed: $filter_env" + + if [ "$filter_tree" ]; then + git checkout-index -f -u -a || + die "Could not checkout the index" + # files that $commit removed are now still in the working tree; + # remove them, else they would be added again + git clean -d -q -f -x + eval "$filter_tree" < /dev/null || + die "tree filter failed: $filter_tree" + + ( + git diff-index -r --name-only --ignore-submodules $commit && + git ls-files --others + ) > "$tempdir"/tree-state || exit + git update-index --add --replace --remove --stdin \ + < "$tempdir"/tree-state || exit + fi + + eval "$filter_index" < /dev/null || + die "index filter failed: $filter_index" + + parentstr= + for parent in $parents; do + for reparent in $(map "$parent"); do + parentstr="$parentstr -p $reparent" + done + done + if [ "$filter_parent" ]; then + parentstr="$(echo "$parentstr" | eval "$filter_parent")" || + die "parent filter failed: $filter_parent" + fi + + sed -e '1,/^$/d' <../commit | \ + eval "$filter_msg" > ../message || + die "msg filter failed: $filter_msg" + /bin/sh -c "$filter_commit" "git commit-tree" \ + $(git write-tree) $parentstr < ../message > ../map/$commit || + die "could not write rewritten commit" +done <../revs + +# If we are filtering for paths, as in the case of a subdirectory +# filter, it is possible that a specified head is not in the set of +# rewritten commits, because it was pruned by the revision walker. +# Ancestor remapping fixes this by mapping these heads to the unique +# nearest ancestor that survived the pruning. + +if test "$remap_to_ancestor" = t +then + while read ref + do + sha1=$(git rev-parse "$ref"^0) + test -f "$workdir"/../map/$sha1 && continue + ancestor=$(git rev-list --simplify-merges -1 "$ref" "$@") + test "$ancestor" && echo $(map $ancestor) >> "$workdir"/../map/$sha1 + done < "$tempdir"/heads +fi + +# Finally update the refs + +_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]' +_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40" +echo +while read ref +do + # avoid rewriting a ref twice + test -f "$orig_namespace$ref" && continue + + sha1=$(git rev-parse "$ref"^0) + rewritten=$(map $sha1) + + test $sha1 = "$rewritten" && + warn "WARNING: Ref '$ref' is unchanged" && + continue + + case "$rewritten" in + '') + echo "Ref '$ref' was deleted" + git update-ref -m "filter-branch: delete" -d "$ref" $sha1 || + die "Could not delete $ref" + ;; + $_x40) + echo "Ref '$ref' was rewritten" + if ! git update-ref -m "filter-branch: rewrite" \ + "$ref" $rewritten $sha1 2>/dev/null; then + if test $(git cat-file -t "$ref") = tag; then + if test -z "$filter_tag_name"; then + warn "WARNING: You said to rewrite tagged commits, but not the corresponding tag." + warn "WARNING: Perhaps use '--tag-name-filter cat' to rewrite the tag." 
+ fi + else + die "Could not rewrite $ref" + fi + fi + ;; + *) + # NEEDSWORK: possibly add -Werror, making this an error + warn "WARNING: '$ref' was rewritten into multiple commits:" + warn "$rewritten" + warn "WARNING: Ref '$ref' points to the first one now." + rewritten=$(echo "$rewritten" | head -n 1) + git update-ref -m "filter-branch: rewrite to first" \ + "$ref" $rewritten $sha1 || + die "Could not rewrite $ref" + ;; + esac + git update-ref -m "filter-branch: backup" "$orig_namespace$ref" $sha1 || + exit +done < "$tempdir"/heads + +# TODO: This should possibly go, with the semantics that all positive given +# refs are updated, and their original heads stored in refs/original/ +# Filter tags + +if [ "$filter_tag_name" ]; then + git for-each-ref --format='%(objectname) %(objecttype) %(refname)' refs/tags | + while read sha1 type ref; do + ref="${ref#refs/tags/}" + # XXX: Rewrite tagged trees as well? + if [ "$type" != "commit" -a "$type" != "tag" ]; then + continue; + fi + + if [ "$type" = "tag" ]; then + # Dereference to a commit + sha1t="$sha1" + sha1="$(git rev-parse -q "$sha1"^{commit})" || continue + fi + + [ -f "../map/$sha1" ] || continue + new_sha1="$(cat "../map/$sha1")" + GIT_COMMIT="$sha1" + export GIT_COMMIT + new_ref="$(echo "$ref" | eval "$filter_tag_name")" || + die "tag name filter failed: $filter_tag_name" + + echo "$ref -> $new_ref ($sha1 -> $new_sha1)" + + if [ "$type" = "tag" ]; then + new_sha1=$( ( printf 'object %s\ntype commit\ntag %s\n' \ + "$new_sha1" "$new_ref" + git cat-file tag "$ref" | + sed -n \ + -e '1,/^$/{ + /^object /d + /^type /d + /^tag /d + }' \ + -e '/^-----BEGIN PGP SIGNATURE-----/q' \ + -e 'p' ) | + git mktag) || + die "Could not create new tag object for $ref" + if git cat-file tag "$ref" | \ + sane_grep '^-----BEGIN PGP SIGNATURE-----' >/dev/null 2>&1 + then + warn "gpg signature stripped from tag object $sha1t" + fi + fi + + git update-ref "refs/tags/$new_ref" "$new_sha1" || + die "Could not write tag $new_ref" + done +fi + +cd ../.. 
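Once a rewrite has been verified, the refs/original/ backups written above are commonly discarded like this (a sketch; re-cloning the repository is the other usual route):

    $ git for-each-ref --format='%(refname)' refs/original/ |
          xargs -n 1 git update-ref -d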
+rm -rf "$tempdir" + +trap - 0 + +unset GIT_DIR GIT_WORK_TREE GIT_INDEX_FILE +test -z "$ORIG_GIT_DIR" || { + GIT_DIR="$ORIG_GIT_DIR" && export GIT_DIR +} +test -z "$ORIG_GIT_WORK_TREE" || { + GIT_WORK_TREE="$ORIG_GIT_WORK_TREE" && + export GIT_WORK_TREE +} +test -z "$ORIG_GIT_INDEX_FILE" || { + GIT_INDEX_FILE="$ORIG_GIT_INDEX_FILE" && + export GIT_INDEX_FILE +} + +if [ "$(is_bare_repository)" = false ]; then + git read-tree -u -m HEAD || exit +fi + +exit 0 diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fmt-merge-msg b/SparkleShare/Mac/git/libexec/git-core/git-fmt-merge-msg new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fmt-merge-msg @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-for-each-ref b/SparkleShare/Mac/git/libexec/git-core/git-for-each-ref new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-for-each-ref @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-format-patch b/SparkleShare/Mac/git/libexec/git-core/git-format-patch new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-format-patch @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fsck b/SparkleShare/Mac/git/libexec/git-core/git-fsck new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fsck @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-fsck-objects b/SparkleShare/Mac/git/libexec/git-core/git-fsck-objects new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-fsck-objects @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-gc b/SparkleShare/Mac/git/libexec/git-core/git-gc new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-gc @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-get-tar-commit-id b/SparkleShare/Mac/git/libexec/git-core/git-get-tar-commit-id new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-get-tar-commit-id @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-grep b/SparkleShare/Mac/git/libexec/git-core/git-grep new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-grep @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-gui b/SparkleShare/Mac/git/libexec/git-core/git-gui new file mode 100755 index 00000000..d2499def --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-gui @@ -0,0 +1,8 @@ +#!/bin/sh +if test "z$*" = zversion || + test "z$*" = z--version +then + echo 'git-gui version 0.13.0.8.g8f85' +else + exec '/usr/local/git/share/git-gui/lib/Git Gui.app/Contents/MacOS/Wish' "$0" "$@" +fi diff --git a/SparkleShare/Mac/git/libexec/git-core/git-gui--askpass b/SparkleShare/Mac/git/libexec/git-core/git-gui--askpass new file mode 100755 index 00000000..4277f30c --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-gui--askpass @@ -0,0 +1,66 @@ +#!/bin/sh +# Tcl ignores 
the next line -*- tcl -*- \ +exec wish "$0" -- "$@" + +# This is a trivial implementation of an SSH_ASKPASS handler. +# Git-gui uses this script if none are already configured. + +package require Tk + +set answer {} +set yesno 0 +set rc 255 + +if {$argc < 1} { + set prompt "Enter your OpenSSH passphrase:" +} else { + set prompt [join $argv " "] + if {[regexp -nocase {\(yes\/no\)\?\s*$} $prompt]} { + set yesno 1 + } +} + +message .m -text $prompt -justify center -aspect 4000 +pack .m -side top -fill x -padx 20 -pady 20 -expand 1 + +entry .e -textvariable answer -width 50 +pack .e -side top -fill x -padx 10 -pady 10 + +if {!$yesno} { + .e configure -show "*" +} + +frame .b +button .b.ok -text OK -command finish +button .b.cancel -text Cancel -command cancel + +pack .b.ok -side left -expand 1 +pack .b.cancel -side right -expand 1 +pack .b -side bottom -fill x -padx 10 -pady 10 + +bind . {focus -force .e} +bind . [list .b.ok invoke] +bind . [list .b.cancel invoke] +bind . {set rc $rc} + +proc cancel {} { + set ::rc 255 +} + +proc finish {} { + if {$::yesno} { + if {$::answer ne "yes" && $::answer ne "no"} { + tk_messageBox -icon error -title "Error" -type ok \ + -message "Only 'yes' or 'no' input allowed." + return + } + } + + puts $::answer + set ::rc 0 +} + +wm title . "OpenSSH" +tk::PlaceWindow . +vwait rc +exit $rc diff --git a/SparkleShare/Mac/git/libexec/git-core/git-hash-object b/SparkleShare/Mac/git/libexec/git-core/git-hash-object new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-hash-object @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-help b/SparkleShare/Mac/git/libexec/git-core/git-help new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-help @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-http-backend b/SparkleShare/Mac/git/libexec/git-core/git-http-backend new file mode 100755 index 00000000..9b7ecc0f Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-http-backend differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-http-fetch b/SparkleShare/Mac/git/libexec/git-core/git-http-fetch new file mode 100755 index 00000000..3b407068 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-http-fetch differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-http-push b/SparkleShare/Mac/git/libexec/git-core/git-http-push new file mode 100755 index 00000000..0171a071 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-http-push differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-imap-send b/SparkleShare/Mac/git/libexec/git-core/git-imap-send new file mode 100755 index 00000000..63f35543 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-imap-send differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-index-pack b/SparkleShare/Mac/git/libexec/git-core/git-index-pack new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-index-pack @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-init b/SparkleShare/Mac/git/libexec/git-core/git-init new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-init @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git 
a/SparkleShare/Mac/git/libexec/git-core/git-init-db b/SparkleShare/Mac/git/libexec/git-core/git-init-db new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-init-db @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-instaweb b/SparkleShare/Mac/git/libexec/git-core/git-instaweb new file mode 100755 index 00000000..67bd4fcc --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-instaweb @@ -0,0 +1,622 @@ +#!/bin/sh +# +# Copyright (c) 2006 Eric Wong +# + +PERL='/usr/bin/perl' +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC="\ +git instaweb [options] (--start | --stop | --restart) +-- +l,local only bind on 127.0.0.1 +p,port= the port to bind to +d,httpd= the command to launch +b,browser= the browser to launch +m,module-path= the module path (only needed for apache2) + Action +stop stop the web server +start start the web server +restart restart the web server +" + +. git-sh-setup + +fqgitdir="$GIT_DIR" +local="$(git config --bool --get instaweb.local)" +httpd="$(git config --get instaweb.httpd)" +root="$(git config --get instaweb.gitwebdir)" +port=$(git config --get instaweb.port) +module_path="$(git config --get instaweb.modulepath)" + +conf="$GIT_DIR/gitweb/httpd.conf" + +# Defaults: + +# if installed, it doesn't need further configuration (module_path) +test -z "$httpd" && httpd='lighttpd -f' + +# Default is /usr/local/git/share/gitweb +test -z "$root" && root='/usr/local/git/share/gitweb' + +# any untaken local port will do... +test -z "$port" && port=1234 + +resolve_full_httpd () { + case "$httpd" in + *apache2*|*lighttpd*|*httpd*) + # yes, *httpd* covers *lighttpd* above, but it is there for clarity + # ensure that the apache2/lighttpd command ends with "-f" + if ! echo "$httpd" | sane_grep -- '-f *$' >/dev/null 2>&1 + then + httpd="$httpd -f" + fi + ;; + *plackup*) + # server is started by running via generated gitweb.psgi in $fqgitdir/gitweb + full_httpd="$fqgitdir/gitweb/gitweb.psgi" + httpd_only="${httpd%% *}" # cut on first space + return + ;; + *webrick*) + # server is started by running via generated webrick.rb in + # $fqgitdir/gitweb + full_httpd="$fqgitdir/gitweb/webrick.rb" + httpd_only="${httpd%% *}" # cut on first space + return + ;; + esac + + httpd_only="$(echo $httpd | cut -f1 -d' ')" + if case "$httpd_only" in /*) : ;; *) which $httpd_only >/dev/null 2>&1;; esac + then + full_httpd=$httpd + else + # many httpds are installed in /usr/sbin or /usr/local/sbin + # these days and those are not in most users $PATHs + # in addition, we may have generated a server script + # in $fqgitdir/gitweb. + for i in /usr/local/sbin /usr/sbin "$root" "$fqgitdir/gitweb" + do + if test -x "$i/$httpd_only" + then + full_httpd=$i/$httpd + return + fi + done + + echo >&2 "$httpd_only not found. Install $httpd_only or use" \ + "--httpd to specify another httpd daemon." + exit 1 + fi +} + +start_httpd () { + if test -f "$fqgitdir/pid"; then + say "Instance already running. Restarting..." + stop_httpd + fi + + # here $httpd should have a meaningful value + resolve_full_httpd + + # don't quote $full_httpd, there can be arguments to it (-f) + case "$httpd" in + *mongoose*|*plackup*) + #These servers don't have a daemon mode so we'll have to fork it + $full_httpd "$fqgitdir/gitweb/httpd.conf" & + #Save the pid before doing anything else (we'll print it later) + pid=$! + + if test $? != 0; then + echo "Could not execute http daemon $httpd." 
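For reference, the option spec above corresponds to invocations such as these (the port and daemon choices are examples):

    $ git instaweb --httpd=webrick --port=8080 --start
    $ git instaweb --local --httpd=lighttpd --start
    $ git instaweb --stop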
+ exit 1 + fi + + cat > "$fqgitdir/pid" <new('127.0.0.1:$port')); +print 'Waiting for \'$httpd\' to start ..'; +do { + print '.'; + sleep(1); +} until (IO::Socket::INET->new('127.0.0.1:$port')); +print qq! (done)\n!; +" +} + +while test $# != 0 +do + case "$1" in + --stop|stop) + stop_httpd + exit 0 + ;; + --start|start) + start_httpd + exit 0 + ;; + --restart|restart) + stop_httpd + start_httpd + exit 0 + ;; + -l|--local) + local=true + ;; + -d|--httpd) + shift + httpd="$1" + ;; + -b|--browser) + shift + browser="$1" + ;; + -p|--port) + shift + port="$1" + ;; + -m|--module-path) + shift + module_path="$1" + ;; + --) + ;; + *) + usage + ;; + esac + shift +done + +mkdir -p "$GIT_DIR/gitweb/tmp" +GIT_EXEC_PATH="$(git --exec-path)" +GIT_DIR="$fqgitdir" +GITWEB_CONFIG="$fqgitdir/gitweb/gitweb_config.perl" +export GIT_EXEC_PATH GIT_DIR GITWEB_CONFIG + +webrick_conf () { + # webrick seems to have no way of passing arbitrary environment + # variables to the underlying CGI executable, so we wrap the + # actual gitweb.cgi using a shell script to force it + wrapper="$fqgitdir/gitweb/$httpd/wrapper.sh" + cat > "$wrapper" <"$fqgitdir/gitweb/$httpd.rb" < $port, + :DocumentRoot => "$root", + :Logger => Logger.new('$fqgitdir/gitweb/error.log'), + :AccessLog => [ + [ Logger.new('$fqgitdir/gitweb/access.log'), + WEBrick::AccessLog::COMBINED_LOG_FORMAT ] + ], + :DirectoryIndex => ["gitweb.cgi"], + :CGIInterpreter => "$wrapper", + :StartCallback => lambda do + File.open("$fqgitdir/pid", "w") { |f| f.puts Process.pid } + end, + :ServerType => WEBrick::Daemon, +} +options[:BindAddress] = '127.0.0.1' if "$local" == "true" +server = WEBrick::HTTPServer.new(options) +['INT', 'TERM'].each do |signal| + trap(signal) {server.shutdown} +end +server.start +EOF + chmod +x "$fqgitdir/gitweb/$httpd.rb" + # configuration is embedded in server script file, webrick.rb + rm -f "$conf" +} + +lighttpd_conf () { + cat > "$conf" < env.PATH, "GITWEB_CONFIG" => env.GITWEB_CONFIG ) + +cgi.assign = ( ".cgi" => "" ) + +# mimetype mapping +mimetype.assign = ( + ".pdf" => "application/pdf", + ".sig" => "application/pgp-signature", + ".spl" => "application/futuresplash", + ".class" => "application/octet-stream", + ".ps" => "application/postscript", + ".torrent" => "application/x-bittorrent", + ".dvi" => "application/x-dvi", + ".gz" => "application/x-gzip", + ".pac" => "application/x-ns-proxy-autoconfig", + ".swf" => "application/x-shockwave-flash", + ".tar.gz" => "application/x-tgz", + ".tgz" => "application/x-tgz", + ".tar" => "application/x-tar", + ".zip" => "application/zip", + ".mp3" => "audio/mpeg", + ".m3u" => "audio/x-mpegurl", + ".wma" => "audio/x-ms-wma", + ".wax" => "audio/x-ms-wax", + ".ogg" => "application/ogg", + ".wav" => "audio/x-wav", + ".gif" => "image/gif", + ".jpg" => "image/jpeg", + ".jpeg" => "image/jpeg", + ".png" => "image/png", + ".xbm" => "image/x-xbitmap", + ".xpm" => "image/x-xpixmap", + ".xwd" => "image/x-xwindowdump", + ".css" => "text/css", + ".html" => "text/html", + ".htm" => "text/html", + ".js" => "text/javascript", + ".asc" => "text/plain", + ".c" => "text/plain", + ".cpp" => "text/plain", + ".log" => "text/plain", + ".conf" => "text/plain", + ".text" => "text/plain", + ".txt" => "text/plain", + ".dtd" => "text/xml", + ".xml" => "text/xml", + ".mpeg" => "video/mpeg", + ".mpg" => "video/mpeg", + ".mov" => "video/quicktime", + ".qt" => "video/quicktime", + ".avi" => "video/x-msvideo", + ".asf" => "video/x-ms-asf", + ".asx" => "video/x-ms-asf", + ".wmv" => "video/x-ms-wmv", + ".bz2" => 
"application/x-bzip", + ".tbz" => "application/x-bzip-compressed-tar", + ".tar.bz2" => "application/x-bzip-compressed-tar", + "" => "text/plain" + ) +EOF + test x"$local" = xtrue && echo 'server.bind = "127.0.0.1"' >> "$conf" +} + +apache2_conf () { + if test -z "$module_path" + then + test -d "/usr/lib/httpd/modules" && + module_path="/usr/lib/httpd/modules" + test -d "/usr/lib/apache2/modules" && + module_path="/usr/lib/apache2/modules" + fi + bind= + test x"$local" = xtrue && bind='127.0.0.1:' + echo 'text/css css' > "$fqgitdir/mime.types" + cat > "$conf" <> "$conf" + fi + done + cat >> "$conf" <) has been applied + if test -f "$module_path/mod_perl.so" && + sane_grep 'MOD_PERL' "$root/gitweb.cgi" >/dev/null + then + # favor mod_perl if available + cat >> "$conf" < + SetHandler perl-script + PerlResponseHandler ModPerl::Registry + PerlOptions +ParseHeaders + Options +ExecCGI + +EOF + else + # plain-old CGI + resolve_full_httpd + list_mods=$(echo "$full_httpd" | sed 's/-f$/-l/') + $list_mods | sane_grep 'mod_cgi\.c' >/dev/null 2>&1 || \ + if test -f "$module_path/mod_cgi.so" + then + echo "LoadModule cgi_module $module_path/mod_cgi.so" >> "$conf" + else + $list_mods | grep 'mod_cgid\.c' >/dev/null 2>&1 || \ + if test -f "$module_path/mod_cgid.so" + then + echo "LoadModule cgid_module $module_path/mod_cgid.so" \ + >> "$conf" + else + echo "You have no CGI support!" + exit 2 + fi + echo "ScriptSock logs/gitweb.sock" >> "$conf" + fi + cat >> "$conf" < + Options +ExecCGI + +EOF + fi +} + +mongoose_conf() { + cat > "$conf" < "$fqgitdir/gitweb/gitweb.psgi" <add_type( + ".pdf" => "application/pdf", + ".sig" => "application/pgp-signature", + ".spl" => "application/futuresplash", + ".class" => "application/octet-stream", + ".ps" => "application/postscript", + ".torrent" => "application/x-bittorrent", + ".dvi" => "application/x-dvi", + ".gz" => "application/x-gzip", + ".pac" => "application/x-ns-proxy-autoconfig", + ".swf" => "application/x-shockwave-flash", + ".tar.gz" => "application/x-tgz", + ".tgz" => "application/x-tgz", + ".tar" => "application/x-tar", + ".zip" => "application/zip", + ".mp3" => "audio/mpeg", + ".m3u" => "audio/x-mpegurl", + ".wma" => "audio/x-ms-wma", + ".wax" => "audio/x-ms-wax", + ".ogg" => "application/ogg", + ".wav" => "audio/x-wav", + ".gif" => "image/gif", + ".jpg" => "image/jpeg", + ".jpeg" => "image/jpeg", + ".png" => "image/png", + ".xbm" => "image/x-xbitmap", + ".xpm" => "image/x-xpixmap", + ".xwd" => "image/x-xwindowdump", + ".css" => "text/css", + ".html" => "text/html", + ".htm" => "text/html", + ".js" => "text/javascript", + ".asc" => "text/plain", + ".c" => "text/plain", + ".cpp" => "text/plain", + ".log" => "text/plain", + ".conf" => "text/plain", + ".text" => "text/plain", + ".txt" => "text/plain", + ".dtd" => "text/xml", + ".xml" => "text/xml", + ".mpeg" => "video/mpeg", + ".mpg" => "video/mpeg", + ".mov" => "video/quicktime", + ".qt" => "video/quicktime", + ".avi" => "video/x-msvideo", + ".asf" => "video/x-ms-asf", + ".asx" => "video/x-ms-asf", + ".wmv" => "video/x-ms-wmv", + ".bz2" => "application/x-bzip", + ".tbz" => "application/x-bzip-compressed-tar", + ".tar.bz2" => "application/x-bzip-compressed-tar", + "" => "text/plain" +); + +my \$app = builder { + # to be able to override \$SIG{__WARN__} to log build time warnings + use CGI::Carp; # it sets \$SIG{__WARN__} itself + + my \$logdir = "$fqgitdir/gitweb/$httpd_only"; + open my \$access_log_fh, '>>', "\$logdir/access.log" + or die "Couldn't open access log '\$logdir/access.log': \$!"; + open my 
\$error_log_fh, '>>', "\$logdir/error.log" + or die "Couldn't open error log '\$logdir/error.log': \$!"; + + \$access_log_fh->autoflush(1); + \$error_log_fh->autoflush(1); + + # redirect build time warnings to error.log + \$SIG{'__WARN__'} = sub { + my \$msg = shift; + # timestamp warning like in CGI::Carp::warn + my \$stamp = CGI::Carp::stamp(); + \$msg =~ s/^/\$stamp/gm; + print \$error_log_fh \$msg; + }; + + # write errors to error.log, access to access.log + enable 'AccessLog', + format => "combined", + logger => sub { print \$access_log_fh @_; }; + enable sub { + my \$app = shift; + sub { + my \$env = shift; + \$env->{'psgi.errors'} = \$error_log_fh; + \$app->(\$env); + } + }; + # gitweb currently doesn't work with $SIG{CHLD} set to 'IGNORE', + # because it uses 'close $fd or die...' on piped filehandle $fh + # (which causes the parent process to wait for child to finish). + enable_if { \$SIG{'CHLD'} eq 'IGNORE' } sub { + my \$app = shift; + sub { + my \$env = shift; + local \$SIG{'CHLD'} = 'DEFAULT'; + local \$SIG{'CLD'} = 'DEFAULT'; + \$app->(\$env); + } + }; + # serve static files, i.e. stylesheet, images, script + enable 'Static', + path => sub { m!\.(js|css|png)\$! && s!^/gitweb/!! }, + root => "$root/", + encoding => 'utf-8'; # encoding for 'text/plain' files + # convert CGI application to PSGI app + Plack::App::WrapCGI->new(script => "$root/gitweb.cgi")->to_app; +}; + +# make it runnable as standalone app, +# like it would be run via 'plackup' utility +if (__FILE__ eq \$0) { + require Plack::Runner; + + my \$runner = Plack::Runner->new(); + \$runner->parse_options(qw(--env deployment --port $port), + "$local" ? qw(--host 127.0.0.1) : ()); + \$runner->run(\$app); +} +__END__ +EOF + + chmod a+x "$fqgitdir/gitweb/gitweb.psgi" + # configuration is embedded in server script file, gitweb.psgi + rm -f "$conf" +} + +gitweb_conf() { + cat > "$fqgitdir/gitweb/gitweb_config.perl" <&2 + +if [ "$#" != "0" ] +then + usage +fi + +laf="$GIT_DIR/lost-found" +rm -fr "$laf" && mkdir -p "$laf/commit" "$laf/other" || exit + +git fsck --full --no-reflogs | +while read dangling type sha1 +do + case "$dangling" in + dangling) + if git rev-parse -q --verify "$sha1^0" >/dev/null + then + dir="$laf/commit" + git show-branch "$sha1" + else + dir="$laf/other" + fi + echo "$sha1" >"$dir/$sha1" + ;; + esac +done diff --git a/SparkleShare/Mac/git/libexec/git-core/git-ls-files b/SparkleShare/Mac/git/libexec/git-core/git-ls-files new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-ls-files @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-ls-remote b/SparkleShare/Mac/git/libexec/git-core/git-ls-remote new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-ls-remote @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-ls-tree b/SparkleShare/Mac/git/libexec/git-core/git-ls-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-ls-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mailinfo b/SparkleShare/Mac/git/libexec/git-core/git-mailinfo new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mailinfo @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git 
a/SparkleShare/Mac/git/libexec/git-core/git-mailsplit b/SparkleShare/Mac/git/libexec/git-core/git-mailsplit new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mailsplit @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge b/SparkleShare/Mac/git/libexec/git-core/git-merge new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-base b/SparkleShare/Mac/git/libexec/git-core/git-merge-base new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-base @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-file b/SparkleShare/Mac/git/libexec/git-core/git-merge-file new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-file @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-index b/SparkleShare/Mac/git/libexec/git-core/git-merge-index new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-index @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-octopus b/SparkleShare/Mac/git/libexec/git-core/git-merge-octopus new file mode 100755 index 00000000..8643f74c --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-octopus @@ -0,0 +1,109 @@ +#!/bin/sh +# +# Copyright (c) 2005 Junio C Hamano +# +# Resolve two or more trees. +# + +LF=' +' + +die () { + echo >&2 "$*" + exit 1 +} + +# The first parameters up to -- are merge bases; the rest are heads. +bases= head= remotes= sep_seen= +for arg +do + case ",$sep_seen,$head,$arg," in + *,--,) + sep_seen=yes + ;; + ,yes,,*) + head=$arg + ;; + ,yes,*) + remotes="$remotes$arg " + ;; + *) + bases="$bases$arg " + ;; + esac +done + +# Reject if this is not an Octopus -- resolve should be used instead. +case "$remotes" in +?*' '?*) + ;; +*) + exit 2 ;; +esac + +# MRC is the current "merge reference commit" +# MRT is the current "merge result tree" + +MRC=$(git rev-parse --verify -q $head) +MRT=$(git write-tree) +NON_FF_MERGE=0 +OCTOPUS_FAILURE=0 +for SHA1 in $remotes +do + case "$OCTOPUS_FAILURE" in + 1) + # We allow only last one to have a hand-resolvable + # conflicts. Last round failed and we still had + # a head to merge. + echo "Automated merge did not work." + echo "Should not be doing an Octopus." + exit 2 + esac + + eval pretty_name=\${GITHEAD_$SHA1:-$SHA1} + if test "$SHA1" = "$pretty_name" + then + SHA1_UP="$(echo "$SHA1" | tr a-z A-Z)" + eval pretty_name=\${GITHEAD_$SHA1_UP:-$pretty_name} + fi + common=$(git merge-base --all $SHA1 $MRC) || + die "Unable to find common commit with $pretty_name" + + case "$LF$common$LF" in + *"$LF$SHA1$LF"*) + echo "Already up-to-date with $pretty_name" + continue + ;; + esac + + if test "$common,$NON_FF_MERGE" = "$MRC,0" + then + # The first head being merged was a fast-forward. + # Advance MRC to the head being merged, and use that + # tree as the intermediate result of the merge. + # We still need to count this as part of the parent set. 
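For orientation, a minimal sketch of when git-merge-octopus runs: git merge chooses the octopus strategy automatically whenever more than one head is given, and the loop above then folds the heads into the result tree one at a time. The branch names below are placeholders, not taken from this repository.

    $ git checkout master
    $ git merge topic-a topic-b topic-c    # three heads, so the octopus strategy drives the loop above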
+ + echo "Fast-forwarding to: $pretty_name" + git read-tree -u -m $head $SHA1 || exit + MRC=$SHA1 MRT=$(git write-tree) + continue + fi + + NON_FF_MERGE=1 + + echo "Trying simple merge with $pretty_name" + git read-tree -u -m --aggressive $common $MRT $SHA1 || exit 2 + next=$(git write-tree 2>/dev/null) + if test $? -ne 0 + then + echo "Simple merge did not work, trying automatic merge." + git-merge-index -o git-merge-one-file -a || + OCTOPUS_FAILURE=1 + next=$(git write-tree 2>/dev/null) + fi + + MRC="$MRC $SHA1" + MRT=$next +done + +exit "$OCTOPUS_FAILURE" diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-one-file b/SparkleShare/Mac/git/libexec/git-core/git-merge-one-file new file mode 100755 index 00000000..d067894b --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-one-file @@ -0,0 +1,160 @@ +#!/bin/sh +# +# Copyright (c) Linus Torvalds, 2005 +# +# This is the git per-file merge script, called with +# +# $1 - original file SHA1 (or empty) +# $2 - file in branch1 SHA1 (or empty) +# $3 - file in branch2 SHA1 (or empty) +# $4 - pathname in repository +# $5 - original file mode (or empty) +# $6 - file in branch1 mode (or empty) +# $7 - file in branch2 mode (or empty) +# +# Handle some trivial cases.. The _really_ trivial cases have +# been handled already by git read-tree, but that one doesn't +# do any merges that might change the tree layout. + +USAGE=' ' +USAGE="$USAGE " +LONG_USAGE="Usage: git merge-one-file $USAGE + +Blob ids and modes should be empty for missing files." + +if ! test "$#" -eq 7 +then + echo "$LONG_USAGE" + exit 1 +fi + +case "${1:-.}${2:-.}${3:-.}" in +# +# Deleted in both or deleted in one and unchanged in the other +# +"$1.." | "$1.$1" | "$1$1.") + if [ "$2" ]; then + echo "Removing $4" + else + # read-tree checked that index matches HEAD already, + # so we know we do not have this path tracked. + # there may be an unrelated working tree file here, + # which we should just leave unmolested. Make sure + # we do not have it in the index, though. + exec git update-index --remove -- "$4" + fi + if test -f "$4"; then + rm -f -- "$4" && + rmdir -p "$(expr "z$4" : 'z\(.*\)/')" 2>/dev/null || : + fi && + exec git update-index --remove -- "$4" + ;; + +# +# Added in one. +# +".$2.") + # the other side did not add and we added so there is nothing + # to be done, except making the path merged. + exec git update-index --add --cacheinfo "$6" "$2" "$4" + ;; +"..$3") + echo "Adding $4" + if test -f "$4" + then + echo "ERROR: untracked $4 is overwritten by the merge." + exit 1 + fi + git update-index --add --cacheinfo "$7" "$3" "$4" && + exec git checkout-index -u -f -- "$4" + ;; + +# +# Added in both, identically (check for same permissions). +# +".$3$2") + if [ "$6" != "$7" ]; then + echo "ERROR: File $4 added identically in both branches," + echo "ERROR: but permissions conflict $6->$7." + exit 1 + fi + echo "Adding $4" + git update-index --add --cacheinfo "$6" "$2" "$4" && + exec git checkout-index -u -f -- "$4" + ;; + +# +# Modified in both, but differently. +# +"$1$2$3" | ".$2$3") + + case ",$6,$7," in + *,120000,*) + echo "ERROR: $4: Not merging symbolic link changes." + exit 1 + ;; + *,160000,*) + echo "ERROR: $4: Not merging conflicting submodule changes." + exit 1 + ;; + esac + + src2=`git-unpack-file $3` + case "$1" in + '') + echo "Added $4 in both, but differently." + # This extracts OUR file in $orig, and uses git apply to + # remove lines that are unique to ours. 
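A quick illustration of the helper used here: git unpack-file writes a blob's contents to a temporary file and prints that file's name, which is how the script obtains scratch copies of each stage. The blob chosen below is only an example.

    $ blob=$(git rev-parse HEAD:README.md)   # any blob id works
    $ tmp=$(git unpack-file "$blob")         # prints a name like .merge_file_XXXXXX
    $ diff -u "$tmp" README.md               # compare the extracted copy with the work tree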
+ orig=`git-unpack-file $2` + sz0=`wc -c <"$orig"` + diff -u -La/$orig -Lb/$orig $orig $src2 | git apply --no-add + sz1=`wc -c <"$orig"` + + # If we do not have enough common material, it is not + # worth trying two-file merge using common subsections. + expr "$sz0" \< "$sz1" \* 2 >/dev/null || : >$orig + ;; + *) + echo "Auto-merging $4" + orig=`git-unpack-file $1` + ;; + esac + + # Be careful for funny filename such as "-L" in "$4", which + # would confuse "merge" greatly. + src1=`git-unpack-file $2` + git merge-file "$src1" "$orig" "$src2" + ret=$? + msg= + if [ $ret -ne 0 ]; then + msg='content conflict' + fi + + # Create the working tree file, using "our tree" version from the + # index, and then store the result of the merge. + git checkout-index -f --stage=2 -- "$4" && cat "$src1" >"$4" + rm -f -- "$orig" "$src1" "$src2" + + if [ "$6" != "$7" ]; then + if [ -n "$msg" ]; then + msg="$msg, " + fi + msg="${msg}permissions conflict: $5->$6,$7" + ret=1 + fi + if [ "$1" = '' ]; then + ret=1 + fi + + if [ $ret -ne 0 ]; then + echo "ERROR: $msg in $4" + exit 1 + fi + exec git update-index -- "$4" + ;; + +*) + echo "ERROR: $4: Not handling case $1 -> $2 -> $3" + ;; +esac +exit 1 diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-ours b/SparkleShare/Mac/git/libexec/git-core/git-merge-ours new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-ours @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-recursive b/SparkleShare/Mac/git/libexec/git-core/git-merge-recursive new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-recursive @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-resolve b/SparkleShare/Mac/git/libexec/git-core/git-merge-resolve new file mode 100755 index 00000000..c9da747f --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-resolve @@ -0,0 +1,54 @@ +#!/bin/sh +# +# Copyright (c) 2005 Linus Torvalds +# Copyright (c) 2005 Junio C Hamano +# +# Resolve two trees, using enhanced multi-base read-tree. + +# The first parameters up to -- are merge bases; the rest are heads. +bases= head= remotes= sep_seen= +for arg +do + case ",$sep_seen,$head,$arg," in + *,--,) + sep_seen=yes + ;; + ,yes,,*) + head=$arg + ;; + ,yes,*) + remotes="$remotes$arg " + ;; + *) + bases="$bases$arg " + ;; + esac +done + +# Give up if we are given two or more remotes -- not handling octopus. +case "$remotes" in +?*' '?*) + exit 2 ;; +esac + +# Give up if this is a baseless merge. +if test '' = "$bases" +then + exit 2 +fi + +git update-index -q --refresh +git read-tree -u -m --aggressive $bases $head $remotes || exit 2 +echo "Trying simple merge." +if result_tree=$(git write-tree 2>/dev/null) +then + exit 0 +else + echo "Simple merge failed, trying Automatic merge." 
+ if git-merge-index -o git-merge-one-file -a + then + exit 0 + else + exit 1 + fi +fi diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-subtree b/SparkleShare/Mac/git/libexec/git-core/git-merge-subtree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-subtree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-merge-tree b/SparkleShare/Mac/git/libexec/git-core/git-merge-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-merge-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mergetool b/SparkleShare/Mac/git/libexec/git-core/git-mergetool new file mode 100755 index 00000000..2f8dc441 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mergetool @@ -0,0 +1,327 @@ +#!/bin/sh +# +# This program resolves merge conflicts in git +# +# Copyright (c) 2006 Theodore Y. Ts'o +# +# This file is licensed under the GPL v2, or a later version +# at the discretion of Junio C Hamano. +# + +USAGE='[--tool=tool] [-y|--no-prompt|--prompt] [file to merge] ...' +SUBDIRECTORY_OK=Yes +OPTIONS_SPEC= +TOOL_MODE=merge +. git-sh-setup +. git-mergetool--lib +require_work_tree + +# Returns true if the mode reflects a symlink +is_symlink () { + test "$1" = 120000 +} + +local_present () { + test -n "$local_mode" +} + +remote_present () { + test -n "$remote_mode" +} + +base_present () { + test -n "$base_mode" +} + +cleanup_temp_files () { + if test "$1" = --save-backup ; then + mv -- "$BACKUP" "$MERGED.orig" + rm -f -- "$LOCAL" "$REMOTE" "$BASE" + else + rm -f -- "$LOCAL" "$REMOTE" "$BASE" "$BACKUP" + fi +} + +describe_file () { + mode="$1" + branch="$2" + file="$3" + + printf " {%s}: " "$branch" + if test -z "$mode"; then + echo "deleted" + elif is_symlink "$mode" ; then + echo "a symbolic link -> '$(cat "$file")'" + else + if base_present; then + echo "modified" + else + echo "created" + fi + fi +} + + +resolve_symlink_merge () { + while true; do + printf "Use (l)ocal or (r)emote, or (a)bort? " + read ans + case "$ans" in + [lL]*) + git checkout-index -f --stage=2 -- "$MERGED" + git add -- "$MERGED" + cleanup_temp_files --save-backup + return 0 + ;; + [rR]*) + git checkout-index -f --stage=3 -- "$MERGED" + git add -- "$MERGED" + cleanup_temp_files --save-backup + return 0 + ;; + [aA]*) + return 1 + ;; + esac + done +} + +resolve_deleted_merge () { + while true; do + if base_present; then + printf "Use (m)odified or (d)eleted file, or (a)bort? " + else + printf "Use (c)reated or (d)eleted file, or (a)bort? " + fi + read ans + case "$ans" in + [mMcC]*) + git add -- "$MERGED" + cleanup_temp_files --save-backup + return 0 + ;; + [dD]*) + git rm -- "$MERGED" > /dev/null + cleanup_temp_files + return 0 + ;; + [aA]*) + return 1 + ;; + esac + done +} + +checkout_staged_file () { + tmpfile=$(expr "$(git checkout-index --temp --stage="$1" "$2")" : '\([^ ]*\) ') + + if test $? -eq 0 -a -n "$tmpfile" ; then + mv -- "$(git rev-parse --show-cdup)$tmpfile" "$3" + fi +} + +merge_file () { + MERGED="$1" + + f=$(git ls-files -u -- "$MERGED") + if test -z "$f" ; then + if test ! 
-f "$MERGED" ; then + echo "$MERGED: file not found" + else + echo "$MERGED: file does not need merging" + fi + return 1 + fi + + ext="$$$(expr "$MERGED" : '.*\(\.[^/]*\)$')" + BACKUP="./$MERGED.BACKUP.$ext" + LOCAL="./$MERGED.LOCAL.$ext" + REMOTE="./$MERGED.REMOTE.$ext" + BASE="./$MERGED.BASE.$ext" + + mv -- "$MERGED" "$BACKUP" + cp -- "$BACKUP" "$MERGED" + + base_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==1) print $1;}') + local_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==2) print $1;}') + remote_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==3) print $1;}') + + base_present && checkout_staged_file 1 "$MERGED" "$BASE" + local_present && checkout_staged_file 2 "$MERGED" "$LOCAL" + remote_present && checkout_staged_file 3 "$MERGED" "$REMOTE" + + if test -z "$local_mode" -o -z "$remote_mode"; then + echo "Deleted merge conflict for '$MERGED':" + describe_file "$local_mode" "local" "$LOCAL" + describe_file "$remote_mode" "remote" "$REMOTE" + resolve_deleted_merge + return + fi + + if is_symlink "$local_mode" || is_symlink "$remote_mode"; then + echo "Symbolic link merge conflict for '$MERGED':" + describe_file "$local_mode" "local" "$LOCAL" + describe_file "$remote_mode" "remote" "$REMOTE" + resolve_symlink_merge + return + fi + + echo "Normal merge conflict for '$MERGED':" + describe_file "$local_mode" "local" "$LOCAL" + describe_file "$remote_mode" "remote" "$REMOTE" + if "$prompt" = true; then + printf "Hit return to start merge resolution tool (%s): " "$merge_tool" + read ans + fi + + if base_present; then + present=true + else + present=false + fi + + if ! run_merge_tool "$merge_tool" "$present"; then + echo "merge of $MERGED failed" 1>&2 + mv -- "$BACKUP" "$MERGED" + + if test "$merge_keep_temporaries" = "false"; then + cleanup_temp_files + fi + + return 1 + fi + + if test "$merge_keep_backup" = "true"; then + mv -- "$BACKUP" "$MERGED.orig" + else + rm -- "$BACKUP" + fi + + git add -- "$MERGED" + cleanup_temp_files + return 0 +} + +prompt=$(git config --bool mergetool.prompt || echo true) + +while test $# != 0 +do + case "$1" in + -t|--tool*) + case "$#,$1" in + *,*=*) + merge_tool=$(expr "z$1" : 'z-[^=]*=\(.*\)') + ;; + 1,*) + usage ;; + *) + merge_tool="$2" + shift ;; + esac + ;; + -y|--no-prompt) + prompt=false + ;; + --prompt) + prompt=true + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift +done + +prompt_after_failed_merge() { + while true; do + printf "Continue merging other unresolved paths (y/n) ? " + read ans + case "$ans" in + + [yY]*) + return 0 + ;; + + [nN]*) + return 1 + ;; + esac + done +} + +if test -z "$merge_tool"; then + merge_tool=$(get_merge_tool "$merge_tool") || exit +fi +merge_keep_backup="$(git config --bool mergetool.keepBackup || echo true)" +merge_keep_temporaries="$(git config --bool mergetool.keepTemporaries || echo false)" + +last_status=0 +rollup_status=0 +rerere=false + +files_to_merge() { + if test "$rerere" = true + then + git rerere status + else + git ls-files -u | sed -e 's/^[^ ]* //' | sort -u + fi +} + + +if test $# -eq 0 ; then + cd_to_toplevel + + if test -e "$GIT_DIR/MERGE_RR" + then + rerere=true + fi + + files=$(files_to_merge) + if test -z "$files" ; then + echo "No files need merging" + exit 0 + fi + + # Save original stdin + exec 3<&0 + + printf "Merging:\n" + printf "$files\n" + + files_to_merge | + while IFS= read i + do + if test $last_status -ne 0; then + prompt_after_failed_merge <&3 || exit 1 + fi + printf "\n" + merge_file "$i" <&3 + last_status=$? 
+ if test $last_status -ne 0; then + rollup_status=1 + fi + done +else + while test $# -gt 0; do + if test $last_status -ne 0; then + prompt_after_failed_merge || exit 1 + fi + printf "\n" + merge_file "$1" + last_status=$? + if test $last_status -ne 0; then + rollup_status=1 + fi + shift + done +fi + +exit $rollup_status diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mergetool--lib b/SparkleShare/Mac/git/libexec/git-core/git-mergetool--lib new file mode 100644 index 00000000..77d4aee2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mergetool--lib @@ -0,0 +1,425 @@ +#!/bin/sh +# git-mergetool--lib is a library for common merge tool functions +diff_mode() { + test "$TOOL_MODE" = diff +} + +merge_mode() { + test "$TOOL_MODE" = merge +} + +translate_merge_tool_path () { + case "$1" in + vimdiff|vimdiff2) + echo vim + ;; + gvimdiff|gvimdiff2) + echo gvim + ;; + emerge) + echo emacs + ;; + araxis) + echo compare + ;; + *) + echo "$1" + ;; + esac +} + +check_unchanged () { + if test "$MERGED" -nt "$BACKUP"; then + status=0 + else + while true; do + echo "$MERGED seems unchanged." + printf "Was the merge successful? [y/n] " + read answer + case "$answer" in + y*|Y*) status=0; break ;; + n*|N*) status=1; break ;; + esac + done + fi +} + +valid_tool () { + case "$1" in + kdiff3 | tkdiff | xxdiff | meld | opendiff | \ + vimdiff | gvimdiff | vimdiff2 | gvimdiff2 | \ + emerge | ecmerge | diffuse | araxis | p4merge) + ;; # happy + tortoisemerge) + if ! merge_mode; then + return 1 + fi + ;; + kompare) + if ! diff_mode; then + return 1 + fi + ;; + *) + if test -z "$(get_merge_tool_cmd "$1")"; then + return 1 + fi + ;; + esac +} + +get_merge_tool_cmd () { + # Prints the custom command for a merge tool + if test -n "$1"; then + merge_tool="$1" + else + merge_tool="$(get_merge_tool)" + fi + if diff_mode; then + echo "$(git config difftool.$merge_tool.cmd || + git config mergetool.$merge_tool.cmd)" + else + echo "$(git config mergetool.$merge_tool.cmd)" + fi +} + +run_merge_tool () { + merge_tool_path="$(get_merge_tool_path "$1")" || exit + base_present="$2" + status=0 + + case "$1" in + kdiff3) + if merge_mode; then + if $base_present; then + ("$merge_tool_path" --auto \ + --L1 "$MERGED (Base)" \ + --L2 "$MERGED (Local)" \ + --L3 "$MERGED (Remote)" \ + -o "$MERGED" \ + "$BASE" "$LOCAL" "$REMOTE" \ + > /dev/null 2>&1) + else + ("$merge_tool_path" --auto \ + --L1 "$MERGED (Local)" \ + --L2 "$MERGED (Remote)" \ + -o "$MERGED" \ + "$LOCAL" "$REMOTE" \ + > /dev/null 2>&1) + fi + status=$? + else + ("$merge_tool_path" --auto \ + --L1 "$MERGED (A)" \ + --L2 "$MERGED (B)" "$LOCAL" "$REMOTE" \ + > /dev/null 2>&1) + fi + ;; + kompare) + "$merge_tool_path" "$LOCAL" "$REMOTE" + ;; + tkdiff) + if merge_mode; then + if $base_present; then + "$merge_tool_path" -a "$BASE" \ + -o "$MERGED" "$LOCAL" "$REMOTE" + else + "$merge_tool_path" \ + -o "$MERGED" "$LOCAL" "$REMOTE" + fi + status=$? 
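As a hedged example of the configuration that get_merge_tool_cmd above reads: a user-defined tool only needs a mergetool.<name>.cmd entry. The tool name and command here are invented for illustration.

    $ git config mergetool.mymeld.cmd 'meld "$LOCAL" "$MERGED" "$REMOTE"'
    $ git config mergetool.mymeld.trustExitCode false
    $ git mergetool -t mymeld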
+ else + "$merge_tool_path" "$LOCAL" "$REMOTE" + fi + ;; + p4merge) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" "$BASE" "$LOCAL" "$REMOTE" "$MERGED" + else + "$merge_tool_path" "$LOCAL" "$LOCAL" "$REMOTE" "$MERGED" + fi + check_unchanged + else + "$merge_tool_path" "$LOCAL" "$REMOTE" + fi + ;; + meld) + if merge_mode; then + touch "$BACKUP" + "$merge_tool_path" "$LOCAL" "$MERGED" "$REMOTE" + check_unchanged + else + "$merge_tool_path" "$LOCAL" "$REMOTE" + fi + ;; + diffuse) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" \ + "$LOCAL" "$MERGED" "$REMOTE" \ + "$BASE" | cat + else + "$merge_tool_path" \ + "$LOCAL" "$MERGED" "$REMOTE" | cat + fi + check_unchanged + else + "$merge_tool_path" "$LOCAL" "$REMOTE" | cat + fi + ;; + vimdiff|gvimdiff) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" -f -d -c "wincmd J" \ + "$MERGED" "$LOCAL" "$BASE" "$REMOTE" + else + "$merge_tool_path" -f -d -c "wincmd l" \ + "$LOCAL" "$MERGED" "$REMOTE" + fi + check_unchanged + else + "$merge_tool_path" -f -d -c "wincmd l" \ + "$LOCAL" "$REMOTE" + fi + ;; + vimdiff2|gvimdiff2) + if merge_mode; then + touch "$BACKUP" + "$merge_tool_path" -f -d -c "wincmd l" \ + "$LOCAL" "$MERGED" "$REMOTE" + check_unchanged + else + "$merge_tool_path" -f -d -c "wincmd l" \ + "$LOCAL" "$REMOTE" + fi + ;; + xxdiff) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" -X --show-merged-pane \ + -R 'Accel.SaveAsMerged: "Ctrl-S"' \ + -R 'Accel.Search: "Ctrl+F"' \ + -R 'Accel.SearchForward: "Ctrl-G"' \ + --merged-file "$MERGED" \ + "$LOCAL" "$BASE" "$REMOTE" + else + "$merge_tool_path" -X $extra \ + -R 'Accel.SaveAsMerged: "Ctrl-S"' \ + -R 'Accel.Search: "Ctrl+F"' \ + -R 'Accel.SearchForward: "Ctrl-G"' \ + --merged-file "$MERGED" \ + "$LOCAL" "$REMOTE" + fi + check_unchanged + else + "$merge_tool_path" \ + -R 'Accel.Search: "Ctrl+F"' \ + -R 'Accel.SearchForward: "Ctrl-G"' \ + "$LOCAL" "$REMOTE" + fi + ;; + opendiff) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" "$LOCAL" "$REMOTE" \ + -ancestor "$BASE" \ + -merge "$MERGED" | cat + else + "$merge_tool_path" "$LOCAL" "$REMOTE" \ + -merge "$MERGED" | cat + fi + check_unchanged + else + "$merge_tool_path" "$LOCAL" "$REMOTE" | cat + fi + ;; + ecmerge) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" "$BASE" "$LOCAL" "$REMOTE" \ + --default --mode=merge3 --to="$MERGED" + else + "$merge_tool_path" "$LOCAL" "$REMOTE" \ + --default --mode=merge2 --to="$MERGED" + fi + check_unchanged + else + "$merge_tool_path" --default --mode=diff2 \ + "$LOCAL" "$REMOTE" + fi + ;; + emerge) + if merge_mode; then + if $base_present; then + "$merge_tool_path" \ + -f emerge-files-with-ancestor-command \ + "$LOCAL" "$REMOTE" "$BASE" \ + "$(basename "$MERGED")" + else + "$merge_tool_path" \ + -f emerge-files-command \ + "$LOCAL" "$REMOTE" \ + "$(basename "$MERGED")" + fi + status=$? 
+ else + "$merge_tool_path" -f emerge-files-command \ + "$LOCAL" "$REMOTE" + fi + ;; + tortoisemerge) + if $base_present; then + touch "$BACKUP" + "$merge_tool_path" \ + -base:"$BASE" -mine:"$LOCAL" \ + -theirs:"$REMOTE" -merged:"$MERGED" + check_unchanged + else + echo "TortoiseMerge cannot be used without a base" 1>&2 + status=1 + fi + ;; + araxis) + if merge_mode; then + touch "$BACKUP" + if $base_present; then + "$merge_tool_path" -wait -merge -3 -a1 \ + "$BASE" "$LOCAL" "$REMOTE" "$MERGED" \ + >/dev/null 2>&1 + else + "$merge_tool_path" -wait -2 \ + "$LOCAL" "$REMOTE" "$MERGED" \ + >/dev/null 2>&1 + fi + check_unchanged + else + "$merge_tool_path" -wait -2 "$LOCAL" "$REMOTE" \ + >/dev/null 2>&1 + fi + ;; + *) + merge_tool_cmd="$(get_merge_tool_cmd "$1")" + if test -z "$merge_tool_cmd"; then + if merge_mode; then + status=1 + fi + break + fi + if merge_mode; then + trust_exit_code="$(git config --bool \ + mergetool."$1".trustExitCode || echo false)" + if test "$trust_exit_code" = "false"; then + touch "$BACKUP" + ( eval $merge_tool_cmd ) + check_unchanged + else + ( eval $merge_tool_cmd ) + status=$? + fi + else + ( eval $merge_tool_cmd ) + fi + ;; + esac + return $status +} + +guess_merge_tool () { + if merge_mode; then + tools="tortoisemerge" + else + tools="kompare" + fi + if test -n "$DISPLAY"; then + if test -n "$GNOME_DESKTOP_SESSION_ID" ; then + tools="meld opendiff kdiff3 tkdiff xxdiff $tools" + else + tools="opendiff kdiff3 tkdiff xxdiff meld $tools" + fi + tools="$tools gvimdiff diffuse ecmerge p4merge araxis" + fi + case "${VISUAL:-$EDITOR}" in + *vim*) + tools="$tools vimdiff emerge" + ;; + *) + tools="$tools emerge vimdiff" + ;; + esac + echo >&2 "merge tool candidates: $tools" + + # Loop over each candidate and stop when a valid merge tool is found. + for i in $tools + do + merge_tool_path="$(translate_merge_tool_path "$i")" + if type "$merge_tool_path" > /dev/null 2>&1; then + echo "$i" + return 0 + fi + done + + echo >&2 "No known merge resolution program available." + return 1 +} + +get_configured_merge_tool () { + # Diff mode first tries diff.tool and falls back to merge.tool. + # Merge mode only checks merge.tool + if diff_mode; then + merge_tool=$(git config diff.tool || git config merge.tool) + else + merge_tool=$(git config merge.tool) + fi + if test -n "$merge_tool" && ! valid_tool "$merge_tool"; then + echo >&2 "git config option $TOOL_MODE.tool set to unknown tool: $merge_tool" + echo >&2 "Resetting to default..." + return 1 + fi + echo "$merge_tool" +} + +get_merge_tool_path () { + # A merge tool has been set, so verify that it's valid. + if test -n "$1"; then + merge_tool="$1" + else + merge_tool="$(get_merge_tool)" + fi + if ! valid_tool "$merge_tool"; then + echo >&2 "Unknown merge tool $merge_tool" + exit 1 + fi + if diff_mode; then + merge_tool_path=$(git config difftool."$merge_tool".path || + git config mergetool."$merge_tool".path) + else + merge_tool_path=$(git config mergetool."$merge_tool".path) + fi + if test -z "$merge_tool_path"; then + merge_tool_path="$(translate_merge_tool_path "$merge_tool")" + fi + if test -z "$(get_merge_tool_cmd "$merge_tool")" && + ! type "$merge_tool_path" > /dev/null 2>&1; then + echo >&2 "The $TOOL_MODE tool $merge_tool is not available as"\ + "'$merge_tool_path'" + exit 1 + fi + echo "$merge_tool_path" +} + +get_merge_tool () { + # Check if a merge tool has been configured + merge_tool=$(get_configured_merge_tool) + # Try to guess an appropriate merge tool if no tool has been set. 
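To summarize the lookups performed by get_configured_merge_tool and guess_merge_tool above, a short sketch (the tool name is only an example):

    $ git config diff.tool               # consulted first, in diff mode only
    $ git config merge.tool              # consulted in both modes
    $ git config merge.tool opendiff     # pinning a tool skips the guessing entirely
    # with nothing configured, guess_merge_tool probes each candidate with 'type'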
+ if test -z "$merge_tool"; then + merge_tool="$(guess_merge_tool)" || exit + fi + echo "$merge_tool" +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mktag b/SparkleShare/Mac/git/libexec/git-core/git-mktag new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mktag @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mktree b/SparkleShare/Mac/git/libexec/git-core/git-mktree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mktree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-mv b/SparkleShare/Mac/git/libexec/git-core/git-mv new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-mv @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-name-rev b/SparkleShare/Mac/git/libexec/git-core/git-name-rev new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-name-rev @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-notes b/SparkleShare/Mac/git/libexec/git-core/git-notes new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-notes @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-pack-objects b/SparkleShare/Mac/git/libexec/git-core/git-pack-objects new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-pack-objects @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-pack-redundant b/SparkleShare/Mac/git/libexec/git-core/git-pack-redundant new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-pack-redundant @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-pack-refs b/SparkleShare/Mac/git/libexec/git-core/git-pack-refs new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-pack-refs @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-parse-remote b/SparkleShare/Mac/git/libexec/git-core/git-parse-remote new file mode 100644 index 00000000..1cc2ba6e --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-parse-remote @@ -0,0 +1,101 @@ +#!/bin/sh + +# git-ls-remote could be called from outside a git managed repository; +# this would fail in that case and would issue an error message. +GIT_DIR=$(git rev-parse -q --git-dir) || :; + +get_data_source () { + case "$1" in + */*) + echo '' + ;; + .) 
+ echo self + ;; + *) + if test "$(git config --get "remote.$1.url")" + then + echo config + elif test -f "$GIT_DIR/remotes/$1" + then + echo remotes + elif test -f "$GIT_DIR/branches/$1" + then + echo branches + else + echo '' + fi ;; + esac +} + +get_remote_url () { + data_source=$(get_data_source "$1") + case "$data_source" in + '') + echo "$1" + ;; + self) + echo "$1" + ;; + config) + git config --get "remote.$1.url" + ;; + remotes) + sed -ne '/^URL: */{ + s///p + q + }' "$GIT_DIR/remotes/$1" + ;; + branches) + sed -e 's/#.*//' "$GIT_DIR/branches/$1" + ;; + *) + die "internal error: get-remote-url $1" ;; + esac +} + +get_default_remote () { + curr_branch=$(git symbolic-ref -q HEAD | sed -e 's|^refs/heads/||') + origin=$(git config --get "branch.$curr_branch.remote") + echo ${origin:-origin} +} + +get_remote_merge_branch () { + case "$#" in + 0|1) + origin="$1" + default=$(get_default_remote) + test -z "$origin" && origin=$default + curr_branch=$(git symbolic-ref -q HEAD) && + [ "$origin" = "$default" ] && + echo $(git for-each-ref --format='%(upstream)' $curr_branch) + ;; + *) + repo=$1 + shift + ref=$1 + # FIXME: It should return the tracking branch + # Currently only works with the default mapping + case "$ref" in + +*) + ref=$(expr "z$ref" : 'z+\(.*\)') + ;; + esac + expr "z$ref" : 'z.*:' >/dev/null || ref="${ref}:" + remote=$(expr "z$ref" : 'z\([^:]*\):') + case "$remote" in + '' | HEAD ) remote=HEAD ;; + heads/*) remote=${remote#heads/} ;; + refs/heads/*) remote=${remote#refs/heads/} ;; + refs/* | tags/* | remotes/* ) remote= + esac + [ -n "$remote" ] && case "$repo" in + .) + echo "refs/heads/$remote" + ;; + *) + echo "refs/remotes/$repo/$remote" + ;; + esac + esac +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-patch-id b/SparkleShare/Mac/git/libexec/git-core/git-patch-id new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-patch-id @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-peek-remote b/SparkleShare/Mac/git/libexec/git-core/git-peek-remote new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-peek-remote @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-prune b/SparkleShare/Mac/git/libexec/git-core/git-prune new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-prune @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-prune-packed b/SparkleShare/Mac/git/libexec/git-core/git-prune-packed new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-prune-packed @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-pull b/SparkleShare/Mac/git/libexec/git-core/git-pull new file mode 100755 index 00000000..eb87f490 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-pull @@ -0,0 +1,300 @@ +#!/bin/sh +# +# Copyright (c) 2005 Junio C Hamano +# +# Fetch one or more remote refs and merge it/them into the current HEAD. + +USAGE='[-n | --no-stat] [--[no-]commit] [--[no-]squash] [--[no-]ff] [-s strategy]... [] ...' +LONG_USAGE='Fetch one or more remote refs and merge it/them into the current HEAD.' +SUBDIRECTORY_OK=Yes +OPTIONS_SPEC= +. 
git-sh-setup +set_reflog_action "pull $*" +require_work_tree +cd_to_toplevel + + +die_conflict () { + git diff-index --cached --name-status -r --ignore-submodules HEAD -- + if [ $(git config --bool --get advice.resolveConflict || echo true) = "true" ]; then + die "Pull is not possible because you have unmerged files. +Please, fix them up in the work tree, and then use 'git add/rm ' +as appropriate to mark resolution, or use 'git commit -a'." + else + die "Pull is not possible because you have unmerged files." + fi +} + +die_merge () { + if [ $(git config --bool --get advice.resolveConflict || echo true) = "true" ]; then + die "You have not concluded your merge (MERGE_HEAD exists). +Please, commit your changes before you can merge." + else + die "You have not concluded your merge (MERGE_HEAD exists)." + fi +} + +test -z "$(git ls-files -u)" || die_conflict +test -f "$GIT_DIR/MERGE_HEAD" && die_merge + +strategy_args= diffstat= no_commit= squash= no_ff= ff_only= +log_arg= verbosity= progress= recurse_submodules= +merge_args= +curr_branch=$(git symbolic-ref -q HEAD) +curr_branch_short="${curr_branch#refs/heads/}" +rebase=$(git config --bool branch.$curr_branch_short.rebase) +dry_run= +while : +do + case "$1" in + -q|--quiet) + verbosity="$verbosity -q" ;; + -v|--verbose) + verbosity="$verbosity -v" ;; + --progress) + progress=--progress ;; + -n|--no-stat|--no-summary) + diffstat=--no-stat ;; + --stat|--summary) + diffstat=--stat ;; + --log|--no-log) + log_arg=$1 ;; + --no-c|--no-co|--no-com|--no-comm|--no-commi|--no-commit) + no_commit=--no-commit ;; + --c|--co|--com|--comm|--commi|--commit) + no_commit=--commit ;; + --sq|--squ|--squa|--squas|--squash) + squash=--squash ;; + --no-sq|--no-squ|--no-squa|--no-squas|--no-squash) + squash=--no-squash ;; + --ff) + no_ff=--ff ;; + --no-ff) + no_ff=--no-ff ;; + --ff-only) + ff_only=--ff-only ;; + -s=*|--s=*|--st=*|--str=*|--stra=*|--strat=*|--strate=*|\ + --strateg=*|--strategy=*|\ + -s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy) + case "$#,$1" in + *,*=*) + strategy=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;; + 1,*) + usage ;; + *) + strategy="$2" + shift ;; + esac + strategy_args="${strategy_args}-s $strategy " + ;; + -X*) + case "$#,$1" in + 1,-X) + usage ;; + *,-X) + xx="-X $(git rev-parse --sq-quote "$2")" + shift ;; + *,*) + xx=$(git rev-parse --sq-quote "$1") ;; + esac + merge_args="$merge_args$xx " + ;; + -r|--r|--re|--reb|--reba|--rebas|--rebase) + rebase=true + ;; + --no-r|--no-re|--no-reb|--no-reba|--no-rebas|--no-rebase) + rebase=false + ;; + --recurse-submodules) + recurse_submodules=--recurse-submodules + ;; + --no-recurse-submodules) + recurse_submodules=--no-recurse-submodules + ;; + --d|--dr|--dry|--dry-|--dry-r|--dry-ru|--dry-run) + dry_run=--dry-run + ;; + -h|--h|--he|--hel|--help) + usage + ;; + *) + # Pass thru anything that may be meant for fetch. 
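Options this loop does not recognize fall through to git fetch untouched; two hedged examples (remote and branch names are placeholders):

    $ git pull --rebase --stat origin master   # --rebase and --stat are consumed by the loop
    $ git pull --depth=1 origin                # --depth=1 is passed straight through to git fetch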
+ break + ;; + esac + shift +done + +error_on_no_merge_candidates () { + exec >&2 + for opt + do + case "$opt" in + -t|--t|--ta|--tag|--tags) + echo "Fetching tags only, you probably meant:" + echo " git fetch --tags" + exit 1 + esac + done + + if test true = "$rebase" + then + op_type=rebase + op_prep=against + else + op_type=merge + op_prep=with + fi + + curr_branch=${curr_branch#refs/heads/} + upstream=$(git config "branch.$curr_branch.merge") + remote=$(git config "branch.$curr_branch.remote") + + if [ $# -gt 1 ]; then + if [ "$rebase" = true ]; then + printf "There is no candidate for rebasing against " + else + printf "There are no candidates for merging " + fi + echo "among the refs that you just fetched." + echo "Generally this means that you provided a wildcard refspec which had no" + echo "matches on the remote end." + elif [ $# -gt 0 ] && [ "$1" != "$remote" ]; then + echo "You asked to pull from the remote '$1', but did not specify" + echo "a branch. Because this is not the default configured remote" + echo "for your current branch, you must specify a branch on the command line." + elif [ -z "$curr_branch" ]; then + echo "You are not currently on a branch, so I cannot use any" + echo "'branch..merge' in your configuration file." + echo "Please specify which remote branch you want to use on the command" + echo "line and try again (e.g. 'git pull ')." + echo "See git-pull(1) for details." + elif [ -z "$upstream" ]; then + echo "You asked me to pull without telling me which branch you" + echo "want to $op_type $op_prep, and 'branch.${curr_branch}.merge' in" + echo "your configuration file does not tell me, either. Please" + echo "specify which branch you want to use on the command line and" + echo "try again (e.g. 'git pull ')." + echo "See git-pull(1) for details." + echo + echo "If you often $op_type $op_prep the same branch, you may want to" + echo "use something like the following in your configuration file:" + echo + echo " [branch \"${curr_branch}\"]" + echo " remote = " + echo " merge = " + test rebase = "$op_type" && + echo " rebase = true" + echo + echo " [remote \"\"]" + echo " url = " + echo " fetch = " + echo + echo "See git-config(1) for details." + else + echo "Your configuration specifies to $op_type $op_prep the ref '${upstream#refs/heads/}'" + echo "from the remote, but no such ref was fetched." + fi + exit 1 +} + +test true = "$rebase" && { + if ! git rev-parse -q --verify HEAD >/dev/null + then + # On an unborn branch + if test -f "$GIT_DIR/index" + then + die "updating an unborn branch with changes added to the index" + fi + else + require_clean_work_tree "pull with rebase" "Please commit or stash them." + fi + oldremoteref= && + . git-parse-remote && + remoteref="$(get_remote_merge_branch "$@" 2>/dev/null)" && + oldremoteref="$(git rev-parse -q --verify "$remoteref")" && + for reflog in $(git rev-list -g $remoteref 2>/dev/null) + do + if test "$reflog" = "$(git merge-base $reflog $curr_branch)" + then + oldremoteref="$reflog" + break + fi + done +} +orig_head=$(git rev-parse -q --verify HEAD) +git fetch $verbosity $progress $dry_run $recurse_submodules --update-head-ok "$@" || exit 1 +test -z "$dry_run" || exit 0 + +curr_head=$(git rev-parse -q --verify HEAD) +if test -n "$orig_head" && test "$curr_head" != "$orig_head" +then + # The fetch involved updating the current branch. + + # The working tree and the index file is still based on the + # $orig_head commit, but we are merging into $curr_head. + # First update the working tree to match $curr_head. 
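The per-branch configuration that the rebase check and error_on_no_merge_candidates above rely on can be set like this (branch, remote and URL values are placeholders, not taken from this repository):

    $ git config branch.master.remote origin
    $ git config branch.master.merge refs/heads/master
    $ git config branch.master.rebase true                  # makes this script rebase instead of merge
    $ git config remote.origin.url git@example.com:project.git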
+ + echo >&2 "Warning: fetch updated the current branch head." + echo >&2 "Warning: fast-forwarding your working tree from" + echo >&2 "Warning: commit $orig_head." + git update-index -q --refresh + git read-tree -u -m "$orig_head" "$curr_head" || + die 'Cannot fast-forward your working tree. +After making sure that you saved anything precious from +$ git diff '$orig_head' +output, run +$ git reset --hard +to recover.' + +fi + +merge_head=$(sed -e '/ not-for-merge /d' \ + -e 's/ .*//' "$GIT_DIR"/FETCH_HEAD | \ + tr '\012' ' ') + +case "$merge_head" in +'') + error_on_no_merge_candidates "$@" + ;; +?*' '?*) + if test -z "$orig_head" + then + die "Cannot merge multiple branches into empty head" + fi + if test true = "$rebase" + then + die "Cannot rebase onto multiple branches" + fi + ;; +esac + +if test -z "$orig_head" +then + git update-ref -m "initial pull" HEAD $merge_head "$curr_head" && + git read-tree --reset -u HEAD || exit 1 + exit +fi + +if test true = "$rebase" +then + o=$(git show-branch --merge-base $curr_branch $merge_head $oldremoteref) + if test "$oldremoteref" = "$o" + then + unset oldremoteref + fi +fi + +merge_name=$(git fmt-merge-msg $log_arg <"$GIT_DIR/FETCH_HEAD") || exit +case "$rebase" in +true) + eval="git-rebase $diffstat $strategy_args $merge_args" + eval="$eval --onto $merge_head ${oldremoteref:-$merge_head}" + ;; +*) + eval="git-merge $diffstat $no_commit $squash $no_ff $ff_only" + eval="$eval $log_arg $strategy_args $merge_args" + eval="$eval \"\$merge_name\" HEAD $merge_head $verbosity" + ;; +esac +eval "exec $eval" diff --git a/SparkleShare/Mac/git/libexec/git-core/git-push b/SparkleShare/Mac/git/libexec/git-core/git-push new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-push @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-quiltimport b/SparkleShare/Mac/git/libexec/git-core/git-quiltimport new file mode 100755 index 00000000..9a6ba2b9 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-quiltimport @@ -0,0 +1,138 @@ +#!/bin/sh +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC="\ +git quiltimport [options] +-- +n,dry-run dry run +author= author name and email address for patches without any +patches= path to the quilt series and patches +" +SUBDIRECTORY_ON=Yes +. git-sh-setup + +dry_run="" +quilt_author="" +while test $# != 0 +do + case "$1" in + --author) + shift + quilt_author="$1" + ;; + -n|--dry-run) + dry_run=1 + ;; + --patches) + shift + QUILT_PATCHES="$1" + ;; + --) + shift + break;; + *) + usage + ;; + esac + shift +done + +# Quilt Author +if [ -n "$quilt_author" ] ; then + quilt_author_name=$(expr "z$quilt_author" : 'z\(.*[^ ]\) *<.*') && + quilt_author_email=$(expr "z$quilt_author" : '.*<\([^>]*\)') && + test '' != "$quilt_author_name" && + test '' != "$quilt_author_email" || + die "malformed --author parameter" +fi + +# Quilt patch directory +: ${QUILT_PATCHES:=patches} +if ! [ -d "$QUILT_PATCHES" ] ; then + echo "The \"$QUILT_PATCHES\" directory does not exist." + exit 1 +fi + +# Temporary directories +tmp_dir="$GIT_DIR"/rebase-apply +tmp_msg="$tmp_dir/msg" +tmp_patch="$tmp_dir/patch" +tmp_info="$tmp_dir/info" + + +# Find the intial commit +commit=$(git rev-parse HEAD) + +mkdir $tmp_dir || exit 2 +while read patch_name level garbage <&3 +do + case "$patch_name" in ''|'#'*) continue;; esac + case "$level" in + -p*) ;; + ''|'#'*) + level=;; + *) + echo "unable to parse patch level, ignoring it." 
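As a hedged usage sketch for the quilt importer above (the patches directory and author are invented):

    $ git quiltimport --dry-run --patches ../quilt-patches \
          --author 'Jane Doe <jane@example.com>'
    $ git quiltimport --patches ../quilt-patches             # import the series for real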
+ level=;; + esac + case "$garbage" in + ''|'#'*);; + *) + echo "trailing garbage found in series file: $garbage" + exit 1;; + esac + if ! [ -f "$QUILT_PATCHES/$patch_name" ] ; then + echo "$patch_name doesn't exist. Skipping." + continue + fi + echo $patch_name + git mailinfo "$tmp_msg" "$tmp_patch" \ + <"$QUILT_PATCHES/$patch_name" >"$tmp_info" || exit 3 + test -s "$tmp_patch" || { + echo "Patch is empty. Was it split wrong?" + exit 1 + } + + # Parse the author information + GIT_AUTHOR_NAME=$(sed -ne 's/Author: //p' "$tmp_info") + GIT_AUTHOR_EMAIL=$(sed -ne 's/Email: //p' "$tmp_info") + export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL + while test -z "$GIT_AUTHOR_EMAIL" && test -z "$GIT_AUTHOR_NAME" ; do + if [ -n "$quilt_author" ] ; then + GIT_AUTHOR_NAME="$quilt_author_name"; + GIT_AUTHOR_EMAIL="$quilt_author_email"; + elif [ -n "$dry_run" ]; then + echo "No author found in $patch_name" >&2; + GIT_AUTHOR_NAME="dry-run-not-found"; + GIT_AUTHOR_EMAIL="dry-run-not-found"; + else + echo "No author found in $patch_name" >&2; + echo "---" + cat $tmp_msg + printf "Author: "; + read patch_author + + echo "$patch_author" + + patch_author_name=$(expr "z$patch_author" : 'z\(.*[^ ]\) *<.*') && + patch_author_email=$(expr "z$patch_author" : '.*<\([^>]*\)') && + test '' != "$patch_author_name" && + test '' != "$patch_author_email" && + GIT_AUTHOR_NAME="$patch_author_name" && + GIT_AUTHOR_EMAIL="$patch_author_email" + fi + done + GIT_AUTHOR_DATE=$(sed -ne 's/Date: //p' "$tmp_info") + SUBJECT=$(sed -ne 's/Subject: //p' "$tmp_info") + export GIT_AUTHOR_DATE SUBJECT + if [ -z "$SUBJECT" ] ; then + SUBJECT=$(echo $patch_name | sed -e 's/.patch$//') + fi + + if [ -z "$dry_run" ] ; then + git apply --index -C1 ${level:+"$level"} "$tmp_patch" && + tree=$(git write-tree) && + commit=$( (echo "$SUBJECT"; echo; cat "$tmp_msg") | git commit-tree $tree -p $commit) && + git update-ref -m "quiltimport: $patch_name" HEAD $commit || exit 4 + fi +done 3<"$QUILT_PATCHES/series" +rm -rf $tmp_dir || exit 5 diff --git a/SparkleShare/Mac/git/libexec/git-core/git-read-tree b/SparkleShare/Mac/git/libexec/git-core/git-read-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-read-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rebase b/SparkleShare/Mac/git/libexec/git-core/git-rebase new file mode 100755 index 00000000..cbb0ea90 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rebase @@ -0,0 +1,603 @@ +#!/bin/sh +# +# Copyright (c) 2005 Junio C Hamano. +# + +USAGE='[--interactive | -i] [-v] [--force-rebase | -f] [--no-ff] [--onto ] (|--root) [] [--quiet | -q]' +LONG_USAGE='git-rebase replaces with a new branch of the +same name. When the --onto option is provided the new branch starts +out with a HEAD equal to , otherwise it is equal to +It then attempts to create a new commit for each commit from the original + that does not exist in the branch. + +It is possible that a merge failure will prevent this process from being +completely automatic. You will have to resolve any such merge failure +and run git rebase --continue. Another option is to bypass the commit +that caused the merge failure with git rebase --skip. To restore the +original and remove the .git/rebase-apply working files, use the +command git rebase --abort instead. + +Note that if is not specified on the command line, the +currently checked out branch is used. 
+ +Example: git-rebase master~1 topic + + A---B---C topic A'\''--B'\''--C'\'' topic + / --> / + D---E---F---G master D---E---F---G master +' + +SUBDIRECTORY_OK=Yes +OPTIONS_SPEC= +. git-sh-setup +set_reflog_action rebase +require_work_tree +cd_to_toplevel + +LF=' +' +OK_TO_SKIP_PRE_REBASE= +RESOLVEMSG=" +When you have resolved this problem run \"git rebase --continue\". +If you would prefer to skip this patch, instead run \"git rebase --skip\". +To restore the original branch and stop rebasing run \"git rebase --abort\". +" +unset newbase +strategy=recursive +strategy_opts= +do_merge= +dotest="$GIT_DIR"/rebase-merge +prec=4 +verbose= +diffstat= +test "$(git config --bool rebase.stat)" = true && diffstat=t +git_am_opt= +rebase_root= +force_rebase= +allow_rerere_autoupdate= + +continue_merge () { + test -n "$prev_head" || die "prev_head must be defined" + test -d "$dotest" || die "$dotest directory does not exist" + + unmerged=$(git ls-files -u) + if test -n "$unmerged" + then + echo "You still have unmerged paths in your index" + echo "did you forget to use git add?" + die "$RESOLVEMSG" + fi + + cmt=`cat "$dotest/current"` + if ! git diff-index --quiet --ignore-submodules HEAD -- + then + if ! git commit --no-verify -C "$cmt" + then + echo "Commit failed, please do not call \"git commit\"" + echo "directly, but instead do one of the following: " + die "$RESOLVEMSG" + fi + if test -z "$GIT_QUIET" + then + printf "Committed: %0${prec}d " $msgnum + fi + echo "$cmt $(git rev-parse HEAD^0)" >> "$dotest/rewritten" + else + if test -z "$GIT_QUIET" + then + printf "Already applied: %0${prec}d " $msgnum + fi + fi + test -z "$GIT_QUIET" && + GIT_PAGER='' git log --format=%s -1 "$cmt" + + prev_head=`git rev-parse HEAD^0` + # save the resulting commit so we can read-tree on it later + echo "$prev_head" > "$dotest/prev_head" + + # onto the next patch: + msgnum=$(($msgnum + 1)) + echo "$msgnum" >"$dotest/msgnum" +} + +call_merge () { + cmt="$(cat "$dotest/cmt.$1")" + echo "$cmt" > "$dotest/current" + hd=$(git rev-parse --verify HEAD) + cmt_name=$(git symbolic-ref HEAD 2> /dev/null || echo HEAD) + msgnum=$(cat "$dotest/msgnum") + end=$(cat "$dotest/end") + eval GITHEAD_$cmt='"${cmt_name##refs/heads/}~$(($end - $msgnum))"' + eval GITHEAD_$hd='$(cat "$dotest/onto_name")' + export GITHEAD_$cmt GITHEAD_$hd + if test -n "$GIT_QUIET" + then + GIT_MERGE_VERBOSITY=1 && export GIT_MERGE_VERBOSITY + fi + eval 'git-merge-$strategy' $strategy_opts '"$cmt^" -- "$hd" "$cmt"' + rv=$? 
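For the default strategy, the eval in call_merge above expands into an ordinary strategy invocation; a sketch with placeholder object ids and names:

    # GITHEAD_* gives the merge machinery readable labels for the raw ids
    $ GITHEAD_1234abc='topic~2' GITHEAD_5678def='master' \
          git merge-recursive 1234abc^ -- 5678def 1234abc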
+ case "$rv" in + 0) + unset GITHEAD_$cmt GITHEAD_$hd + return + ;; + 1) + git rerere $allow_rerere_autoupdate + die "$RESOLVEMSG" + ;; + 2) + echo "Strategy: $rv $strategy failed, try another" 1>&2 + die "$RESOLVEMSG" + ;; + *) + die "Unknown exit code ($rv) from command:" \ + "git-merge-$strategy $cmt^ -- HEAD $cmt" + ;; + esac +} + +move_to_original_branch () { + test -z "$head_name" && + head_name="$(cat "$dotest"/head-name)" && + onto="$(cat "$dotest"/onto)" && + orig_head="$(cat "$dotest"/orig-head)" + case "$head_name" in + refs/*) + message="rebase finished: $head_name onto $onto" + git update-ref -m "$message" \ + $head_name $(git rev-parse HEAD) $orig_head && + git symbolic-ref HEAD $head_name || + die "Could not move back to $head_name" + ;; + esac +} + +finish_rb_merge () { + move_to_original_branch + git notes copy --for-rewrite=rebase < "$dotest"/rewritten + if test -x "$GIT_DIR"/hooks/post-rewrite && + test -s "$dotest"/rewritten; then + "$GIT_DIR"/hooks/post-rewrite rebase < "$dotest"/rewritten + fi + rm -r "$dotest" + say All done. +} + +is_interactive () { + while test $# != 0 + do + case "$1" in + -i|--interactive) + interactive_rebase=explicit + break + ;; + -p|--preserve-merges) + interactive_rebase=implied + ;; + esac + shift + done + + if [ "$interactive_rebase" = implied ]; then + GIT_EDITOR=: + export GIT_EDITOR + fi + + test -n "$interactive_rebase" || test -f "$dotest"/interactive +} + +run_pre_rebase_hook () { + if test -z "$OK_TO_SKIP_PRE_REBASE" && + test -x "$GIT_DIR/hooks/pre-rebase" + then + "$GIT_DIR/hooks/pre-rebase" ${1+"$@"} || + die "The pre-rebase hook refused to rebase." + fi +} + +test -f "$GIT_DIR"/rebase-apply/applying && + die 'It looks like git-am is in progress. Cannot rebase.' + +is_interactive "$@" && exec git-rebase--interactive "$@" + +while test $# != 0 +do + case "$1" in + --no-verify) + OK_TO_SKIP_PRE_REBASE=yes + ;; + --verify) + OK_TO_SKIP_PRE_REBASE= + ;; + --continue) + test -d "$dotest" -o -d "$GIT_DIR"/rebase-apply || + die "No rebase in progress?" + + git update-index --ignore-submodules --refresh && + git diff-files --quiet --ignore-submodules || { + echo "You must edit all merge conflicts and then" + echo "mark them as resolved using git add" + exit 1 + } + if test -d "$dotest" + then + prev_head=$(cat "$dotest/prev_head") + end=$(cat "$dotest/end") + msgnum=$(cat "$dotest/msgnum") + onto=$(cat "$dotest/onto") + GIT_QUIET=$(cat "$dotest/quiet") + continue_merge + while test "$msgnum" -le "$end" + do + call_merge "$msgnum" + continue_merge + done + finish_rb_merge + exit + fi + head_name=$(cat "$GIT_DIR"/rebase-apply/head-name) && + onto=$(cat "$GIT_DIR"/rebase-apply/onto) && + orig_head=$(cat "$GIT_DIR"/rebase-apply/orig-head) && + GIT_QUIET=$(cat "$GIT_DIR"/rebase-apply/quiet) + git am --resolved --3way --resolvemsg="$RESOLVEMSG" && + move_to_original_branch + exit + ;; + --skip) + test -d "$dotest" -o -d "$GIT_DIR"/rebase-apply || + die "No rebase in progress?" + + git reset --hard HEAD || exit $? 
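For reference, the three recovery entry points handled in this block are driven from the command line exactly as the resolve message above suggests:

    $ git rebase --continue   # after resolving conflicts and 'git add'-ing the paths
    $ git rebase --skip       # drop the commit that refuses to apply
    $ git rebase --abort      # restore the original branch and remove the state directory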
+ if test -d "$dotest" + then + git rerere clear + prev_head=$(cat "$dotest/prev_head") + end=$(cat "$dotest/end") + msgnum=$(cat "$dotest/msgnum") + msgnum=$(($msgnum + 1)) + onto=$(cat "$dotest/onto") + GIT_QUIET=$(cat "$dotest/quiet") + while test "$msgnum" -le "$end" + do + call_merge "$msgnum" + continue_merge + done + finish_rb_merge + exit + fi + head_name=$(cat "$GIT_DIR"/rebase-apply/head-name) && + onto=$(cat "$GIT_DIR"/rebase-apply/onto) && + orig_head=$(cat "$GIT_DIR"/rebase-apply/orig-head) && + GIT_QUIET=$(cat "$GIT_DIR"/rebase-apply/quiet) + git am -3 --skip --resolvemsg="$RESOLVEMSG" && + move_to_original_branch + exit + ;; + --abort) + test -d "$dotest" -o -d "$GIT_DIR"/rebase-apply || + die "No rebase in progress?" + + git rerere clear + + test -d "$dotest" || dotest="$GIT_DIR"/rebase-apply + + head_name="$(cat "$dotest"/head-name)" && + case "$head_name" in + refs/*) + git symbolic-ref HEAD $head_name || + die "Could not move back to $head_name" + ;; + esac + git reset --hard $(cat "$dotest/orig-head") + rm -r "$dotest" + exit + ;; + --onto) + test 2 -le "$#" || usage + newbase="$2" + shift + ;; + -M|-m|--m|--me|--mer|--merg|--merge) + do_merge=t + ;; + -X*|--strategy-option*) + case "$#,$1" in + 1,-X|1,--strategy-option) + usage ;; + *,-X|*,--strategy-option) + newopt="$2" + shift ;; + *,--strategy-option=*) + newopt="$(expr " $1" : ' --strategy-option=\(.*\)')" ;; + *,-X*) + newopt="$(expr " $1" : ' -X\(.*\)')" ;; + 1,*) + usage ;; + esac + strategy_opts="$strategy_opts $(git rev-parse --sq-quote "--$newopt")" + do_merge=t + ;; + -s=*|--s=*|--st=*|--str=*|--stra=*|--strat=*|--strate=*|\ + --strateg=*|--strategy=*|\ + -s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy) + case "$#,$1" in + *,*=*) + strategy=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;; + 1,*) + usage ;; + *) + strategy="$2" + shift ;; + esac + do_merge=t + ;; + -n|--no-stat) + diffstat= + ;; + --stat) + diffstat=t + ;; + -v|--verbose) + verbose=t + diffstat=t + GIT_QUIET= + ;; + -q|--quiet) + GIT_QUIET=t + git_am_opt="$git_am_opt -q" + verbose= + diffstat= + ;; + --whitespace=*) + git_am_opt="$git_am_opt $1" + case "$1" in + --whitespace=fix|--whitespace=strip) + force_rebase=t + ;; + esac + ;; + --ignore-whitespace) + git_am_opt="$git_am_opt $1" + ;; + --committer-date-is-author-date|--ignore-date) + git_am_opt="$git_am_opt $1" + force_rebase=t + ;; + -C*) + git_am_opt="$git_am_opt $1" + ;; + --root) + rebase_root=t + ;; + -f|--f|--fo|--for|--forc|--force|--force-r|--force-re|--force-reb|--force-reba|--force-rebas|--force-rebase|--no-ff) + force_rebase=t + ;; + --rerere-autoupdate|--no-rerere-autoupdate) + allow_rerere_autoupdate="$1" + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift +done +test $# -gt 2 && usage + +if test $# -eq 0 && test -z "$rebase_root" +then + test -d "$dotest" -o -d "$GIT_DIR"/rebase-apply || usage + test -d "$dotest" -o -f "$GIT_DIR"/rebase-apply/rebasing && + die 'A rebase is in progress, try --continue, --skip or --abort.' +fi + +# Make sure we do not have $GIT_DIR/rebase-apply +if test -z "$do_merge" +then + if mkdir "$GIT_DIR"/rebase-apply 2>/dev/null + then + rmdir "$GIT_DIR"/rebase-apply + else + echo >&2 ' +It seems that I cannot create a rebase-apply directory, and +I wonder if you are in the middle of patch application or another +rebase. If that is not the case, please + rm -fr '"$GIT_DIR"'/rebase-apply +and run me again. I am stopping in case you still have something +valuable there.' 
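A few hedged invocations that exercise the options parsed above (branch names are placeholders):

    $ git rebase --onto master next topic     # replay the commits of topic that are not in next onto master
    $ git rebase -s recursive -X theirs master
    $ git rebase --whitespace=fix master      # sets force_rebase, so it rebases even when up to date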
+ exit 1 + fi +else + if test -d "$dotest" + then + die "previous rebase directory $dotest still exists." \ + 'Try git rebase (--continue | --abort | --skip)' + fi +fi + +require_clean_work_tree "rebase" "Please commit or stash them." + +if test -z "$rebase_root" +then + # The upstream head must be given. Make sure it is valid. + upstream_name="$1" + shift + upstream=`git rev-parse --verify "${upstream_name}^0"` || + die "invalid upstream $upstream_name" + unset root_flag + upstream_arg="$upstream_name" +else + test -z "$newbase" && die "--root must be used with --onto" + unset upstream_name + unset upstream + root_flag="--root" + upstream_arg="$root_flag" +fi + +# Make sure the branch to rebase onto is valid. +onto_name=${newbase-"$upstream_name"} +case "$onto_name" in +*...*) + if left=${onto_name%...*} right=${onto_name#*...} && + onto=$(git merge-base --all ${left:-HEAD} ${right:-HEAD}) + then + case "$onto" in + ?*"$LF"?*) + die "$onto_name: there are more than one merge bases" + ;; + '') + die "$onto_name: there is no merge base" + ;; + esac + else + die "$onto_name: there is no merge base" + fi + ;; +*) + onto=$(git rev-parse --verify "${onto_name}^0") || exit + ;; +esac + +# If a hook exists, give it a chance to interrupt +run_pre_rebase_hook "$upstream_arg" "$@" + +# If the branch to rebase is given, that is the branch we will rebase +# $branch_name -- branch being rebased, or HEAD (already detached) +# $orig_head -- commit object name of tip of the branch before rebasing +# $head_name -- refs/heads/ or "detached HEAD" +switch_to= +case "$#" in +1) + # Is it "rebase other $branchname" or "rebase other $commit"? + branch_name="$1" + switch_to="$1" + + if git show-ref --verify --quiet -- "refs/heads/$1" && + branch=$(git rev-parse -q --verify "refs/heads/$1") + then + head_name="refs/heads/$1" + elif branch=$(git rev-parse -q --verify "$1") + then + head_name="detached HEAD" + else + echo >&2 "fatal: no such branch: $1" + usage + fi + ;; +*) + # Do not need to switch branches, we are already on it. + if branch_name=`git symbolic-ref -q HEAD` + then + head_name=$branch_name + branch_name=`expr "z$branch_name" : 'zrefs/heads/\(.*\)'` + else + head_name="detached HEAD" + branch_name=HEAD ;# detached + fi + branch=$(git rev-parse --verify "${branch_name}^0") || exit + ;; +esac +orig_head=$branch + +# Now we are rebasing commits $upstream..$branch (or with --root, +# everything leading up to $branch) on top of $onto + +# Check if we are already based on $onto with linear history, +# but this should be done only when upstream and onto are the same. +mb=$(git merge-base "$onto" "$branch") +if test "$upstream" = "$onto" && test "$mb" = "$onto" && + # linear history? + ! (git rev-list --parents "$onto".."$branch" | sane_grep " .* ") > /dev/null +then + if test -z "$force_rebase" + then + # Lazily switch to the target branch if needed... + test -z "$switch_to" || git checkout "$switch_to" -- + say "Current branch $branch_name is up to date." + exit 0 + else + say "Current branch $branch_name is up to date, rebase forced." + fi +fi + +# Detach HEAD and reset the tree +say "First, rewinding head to replay your work on top of it..." 
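The up-to-date test just above boils down to a couple of plumbing calls; a sketch with placeholder branch names:

    $ onto=$(git rev-parse --verify master^0)
    $ mb=$(git merge-base "$onto" topic)
    $ test "$mb" = "$onto" && echo "topic already contains master"
    # the script additionally requires linear history, checked with 'git rev-list --parents'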
+git checkout -q "$onto^0" || die "could not detach HEAD" +git update-ref ORIG_HEAD $branch + +if test -n "$diffstat" +then + if test -n "$verbose" + then + echo "Changes from $mb to $onto:" + fi + # We want color (if set), but no pager + GIT_PAGER='' git diff --stat --summary "$mb" "$onto" +fi + +# If the $onto is a proper descendant of the tip of the branch, then +# we just fast-forwarded. +if test "$mb" = "$branch" +then + say "Fast-forwarded $branch_name to $onto_name." + move_to_original_branch + exit 0 +fi + +if test -n "$rebase_root" +then + revisions="$onto..$orig_head" +else + revisions="$upstream..$orig_head" +fi + +if test -z "$do_merge" +then + git format-patch -k --stdout --full-index --ignore-if-in-upstream \ + --src-prefix=a/ --dst-prefix=b/ \ + --no-renames $root_flag "$revisions" | + git am $git_am_opt --rebasing --resolvemsg="$RESOLVEMSG" && + move_to_original_branch + ret=$? + test 0 != $ret -a -d "$GIT_DIR"/rebase-apply && + echo $head_name > "$GIT_DIR"/rebase-apply/head-name && + echo $onto > "$GIT_DIR"/rebase-apply/onto && + echo $orig_head > "$GIT_DIR"/rebase-apply/orig-head && + echo "$GIT_QUIET" > "$GIT_DIR"/rebase-apply/quiet + exit $ret +fi + +# start doing a rebase with git-merge +# this is rename-aware if the recursive (default) strategy is used + +mkdir -p "$dotest" +echo "$onto" > "$dotest/onto" +echo "$onto_name" > "$dotest/onto_name" +prev_head=$orig_head +echo "$prev_head" > "$dotest/prev_head" +echo "$orig_head" > "$dotest/orig-head" +echo "$head_name" > "$dotest/head-name" +echo "$GIT_QUIET" > "$dotest/quiet" + +msgnum=0 +for cmt in `git rev-list --reverse --no-merges "$revisions"` +do + msgnum=$(($msgnum + 1)) + echo "$cmt" > "$dotest/cmt.$msgnum" +done + +echo 1 >"$dotest/msgnum" +echo $msgnum >"$dotest/end" + +end=$msgnum +msgnum=1 + +while test "$msgnum" -le "$end" +do + call_merge "$msgnum" + continue_merge +done + +finish_rb_merge diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rebase--interactive b/SparkleShare/Mac/git/libexec/git-core/git-rebase--interactive new file mode 100755 index 00000000..4ae02c0c --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rebase--interactive @@ -0,0 +1,1049 @@ +#!/bin/sh +# +# Copyright (c) 2006 Johannes E. Schindelin + +# SHORT DESCRIPTION +# +# This script makes it easy to fix up commits in the middle of a series, +# and rearrange commits. +# +# The original idea comes from Eric W. Biederman, in +# http://article.gmane.org/gmane.comp.version-control.git/22407 + +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC="\ +git-rebase [-i] [options] [--] [] +git-rebase [-i] (--continue | --abort | --skip) +-- + Available options are +v,verbose display a diffstat of what changed upstream +onto= rebase onto given branch instead of upstream +p,preserve-merges try to recreate merges instead of ignoring them +s,strategy= use the given merge strategy +no-ff cherry-pick all commits, even if unchanged +m,merge always used (no-op) +i,interactive always used (no-op) + Actions: +continue continue rebasing process +abort abort rebasing process and restore original branch +skip skip current patch and continue rebasing process +no-verify override pre-rebase hook from stopping the operation +verify allow pre-rebase hook to run +root rebase all reachable commmits up to the root(s) +autosquash move commits that begin with squash!/fixup! under -i +" + +. git-sh-setup +require_work_tree + +DOTEST="$GIT_DIR/rebase-merge" + +# The file containing rebase commands, comments, and empty lines. 
+# This file is created by "git rebase -i" then edited by the user. As +# the lines are processed, they are removed from the front of this +# file and written to the tail of $DONE. +TODO="$DOTEST"/git-rebase-todo + +# The rebase command lines that have already been processed. A line +# is moved here when it is first handled, before any associated user +# actions. +DONE="$DOTEST"/done + +# The commit message that is planned to be used for any changes that +# need to be committed following a user interaction. +MSG="$DOTEST"/message + +# The file into which is accumulated the suggested commit message for +# squash/fixup commands. When the first of a series of squash/fixups +# is seen, the file is created and the commit message from the +# previous commit and from the first squash/fixup commit are written +# to it. The commit message for each subsequent squash/fixup commit +# is appended to the file as it is processed. +# +# The first line of the file is of the form +# # This is a combination of $COUNT commits. +# where $COUNT is the number of commits whose messages have been +# written to the file so far (including the initial "pick" commit). +# Each time that a commit message is processed, this line is read and +# updated. It is deleted just before the combined commit is made. +SQUASH_MSG="$DOTEST"/message-squash + +# If the current series of squash/fixups has not yet included a squash +# command, then this file exists and holds the commit message of the +# original "pick" commit. (If the series ends without a "squash" +# command, then this can be used as the commit message of the combined +# commit without opening the editor.) +FIXUP_MSG="$DOTEST"/message-fixup + +# $REWRITTEN is the name of a directory containing files for each +# commit that is reachable by at least one merge base of $HEAD and +# $UPSTREAM. They are not necessarily rewritten, but their children +# might be. This ensures that commits on merged, but otherwise +# unrelated side branches are left alone. (Think "X" in the man page's +# example.) +REWRITTEN="$DOTEST"/rewritten + +DROPPED="$DOTEST"/dropped + +# A script to set the GIT_AUTHOR_NAME, GIT_AUTHOR_EMAIL, and +# GIT_AUTHOR_DATE that will be used for the commit that is currently +# being rebased. +AUTHOR_SCRIPT="$DOTEST"/author-script + +# When an "edit" rebase command is being processed, the SHA1 of the +# commit to be edited is recorded in this file. When "git rebase +# --continue" is executed, if there are any staged changes then they +# will be amended to the HEAD commit, but only provided the HEAD +# commit is still the commit to be edited. When any other rebase +# command is processed, this file is deleted. +AMEND="$DOTEST"/amend + +# For the post-rewrite hook, we make a list of rewritten commits and +# their new sha1s. The rewritten-pending list keeps the sha1s of +# commits that have been processed, but not committed yet, +# e.g. because they are waiting for a 'squash' command. +REWRITTEN_LIST="$DOTEST"/rewritten-list +REWRITTEN_PENDING="$DOTEST"/rewritten-pending + +PRESERVE_MERGES= +STRATEGY= +ONTO= +VERBOSE= +OK_TO_SKIP_PRE_REBASE= +REBASE_ROOT= +AUTOSQUASH= +test "$(git config --bool rebase.autosquash)" = "true" && AUTOSQUASH=t +NEVER_FF= + +GIT_CHERRY_PICK_HELP="\ +hint: after resolving the conflicts, mark the corrected paths +hint: with 'git add ' and run 'git rebase --continue'" +export GIT_CHERRY_PICK_HELP + +warn () { + printf '%s\n' "$*" >&2 +} + +output () { + case "$VERBOSE" in + '') + output=$("$@" 2>&1 ) + status=$? 
+ test $status != 0 && printf "%s\n" "$output" + return $status + ;; + *) + "$@" + ;; + esac +} + +# Output the commit message for the specified commit. +commit_message () { + git cat-file commit "$1" | sed "1,/^$/d" +} + +run_pre_rebase_hook () { + if test -z "$OK_TO_SKIP_PRE_REBASE" && + test -x "$GIT_DIR/hooks/pre-rebase" + then + "$GIT_DIR/hooks/pre-rebase" ${1+"$@"} || { + echo >&2 "The pre-rebase hook refused to rebase." + exit 1 + } + fi +} + + +ORIG_REFLOG_ACTION="$GIT_REFLOG_ACTION" + +comment_for_reflog () { + case "$ORIG_REFLOG_ACTION" in + ''|rebase*) + GIT_REFLOG_ACTION="rebase -i ($1)" + export GIT_REFLOG_ACTION + ;; + esac +} + +last_count= +mark_action_done () { + sed -e 1q < "$TODO" >> "$DONE" + sed -e 1d < "$TODO" >> "$TODO".new + mv -f "$TODO".new "$TODO" + count=$(sane_grep -c '^[^#]' < "$DONE") + total=$(($count+$(sane_grep -c '^[^#]' < "$TODO"))) + if test "$last_count" != "$count" + then + last_count=$count + printf "Rebasing (%d/%d)\r" $count $total + test -z "$VERBOSE" || echo + fi +} + +make_patch () { + sha1_and_parents="$(git rev-list --parents -1 "$1")" + case "$sha1_and_parents" in + ?*' '?*' '?*) + git diff --cc $sha1_and_parents + ;; + ?*' '?*) + git diff-tree -p "$1^!" + ;; + *) + echo "Root commit" + ;; + esac > "$DOTEST"/patch + test -f "$MSG" || + commit_message "$1" > "$MSG" + test -f "$AUTHOR_SCRIPT" || + get_author_ident_from_commit "$1" > "$AUTHOR_SCRIPT" +} + +die_with_patch () { + echo "$1" > "$DOTEST"/stopped-sha + make_patch "$1" + git rerere + die "$2" +} + +die_abort () { + rm -rf "$DOTEST" + die "$1" +} + +has_action () { + sane_grep '^[^#]' "$1" >/dev/null +} + +# Run command with GIT_AUTHOR_NAME, GIT_AUTHOR_EMAIL, and +# GIT_AUTHOR_DATE exported from the current environment. +do_with_author () { + ( + export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE + "$@" + ) +} + +pick_one () { + ff=--ff + case "$1" in -n) sha1=$2; ff= ;; *) sha1=$1 ;; esac + case "$NEVER_FF" in '') ;; ?*) ff= ;; esac + output git rev-parse --verify $sha1 || die "Invalid commit name: $sha1" + test -d "$REWRITTEN" && + pick_one_preserving_merges "$@" && return + if test -n "$REBASE_ROOT" + then + output git cherry-pick "$@" + return + fi + output git cherry-pick $ff "$@" +} + +pick_one_preserving_merges () { + fast_forward=t + case "$1" in + -n) + fast_forward=f + sha1=$2 + ;; + *) + sha1=$1 + ;; + esac + sha1=$(git rev-parse $sha1) + + if test -f "$DOTEST"/current-commit + then + if test "$fast_forward" = t + then + while read current_commit + do + git rev-parse HEAD > "$REWRITTEN"/$current_commit + done <"$DOTEST"/current-commit + rm "$DOTEST"/current-commit || + die "Cannot write current commit's replacement sha1" + fi + fi + + echo $sha1 >> "$DOTEST"/current-commit + + # rewrite parents; if none were rewritten, we can fast-forward. 
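+ # Each parent is looked up in $REWRITTEN: a rewritten parent is replaced
+ # by its new sha1 (falling back to HEAD when the todo was reordered and
+ # that parent has not been picked yet); a parent recorded in $DROPPED is
+ # substituted by its stored replacement; any other parent is kept as-is.
+ # If any parent changed, fast_forward is set to f and the commit is
+ # redone with merge/cherry-pick below.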
+ new_parents= + pend=" $(git rev-list --parents -1 $sha1 | cut -d' ' -s -f2-)" + if test "$pend" = " " + then + pend=" root" + fi + while [ "$pend" != "" ] + do + p=$(expr "$pend" : ' \([^ ]*\)') + pend="${pend# $p}" + + if test -f "$REWRITTEN"/$p + then + new_p=$(cat "$REWRITTEN"/$p) + + # If the todo reordered commits, and our parent is marked for + # rewriting, but hasn't been gotten to yet, assume the user meant to + # drop it on top of the current HEAD + if test -z "$new_p" + then + new_p=$(git rev-parse HEAD) + fi + + test $p != $new_p && fast_forward=f + case "$new_parents" in + *$new_p*) + ;; # do nothing; that parent is already there + *) + new_parents="$new_parents $new_p" + ;; + esac + else + if test -f "$DROPPED"/$p + then + fast_forward=f + replacement="$(cat "$DROPPED"/$p)" + test -z "$replacement" && replacement=root + pend=" $replacement$pend" + else + new_parents="$new_parents $p" + fi + fi + done + case $fast_forward in + t) + output warn "Fast-forward to $sha1" + output git reset --hard $sha1 || + die "Cannot fast-forward to $sha1" + ;; + f) + first_parent=$(expr "$new_parents" : ' \([^ ]*\)') + + if [ "$1" != "-n" ] + then + # detach HEAD to current parent + output git checkout $first_parent 2> /dev/null || + die "Cannot move HEAD to $first_parent" + fi + + case "$new_parents" in + ' '*' '*) + test "a$1" = a-n && die "Refusing to squash a merge: $sha1" + + # redo merge + author_script=$(get_author_ident_from_commit $sha1) + eval "$author_script" + msg="$(commit_message $sha1)" + # No point in merging the first parent, that's HEAD + new_parents=${new_parents# $first_parent} + if ! do_with_author output \ + git merge $STRATEGY -m "$msg" $new_parents + then + printf "%s\n" "$msg" > "$GIT_DIR"/MERGE_MSG + die_with_patch $sha1 "Error redoing merge $sha1" + fi + echo "$sha1 $(git rev-parse HEAD^0)" >> "$REWRITTEN_LIST" + ;; + *) + output git cherry-pick "$@" || + die_with_patch $sha1 "Could not pick $sha1" + ;; + esac + ;; + esac +} + +nth_string () { + case "$1" in + *1[0-9]|*[04-9]) echo "$1"th;; + *1) echo "$1"st;; + *2) echo "$1"nd;; + *3) echo "$1"rd;; + esac +} + +update_squash_messages () { + if test -f "$SQUASH_MSG"; then + mv "$SQUASH_MSG" "$SQUASH_MSG".bak || exit + COUNT=$(($(sed -n \ + -e "1s/^# This is a combination of \(.*\) commits\./\1/p" \ + -e "q" < "$SQUASH_MSG".bak)+1)) + { + echo "# This is a combination of $COUNT commits." + sed -e 1d -e '2,/^./{ + /^$/d + }' <"$SQUASH_MSG".bak + } >"$SQUASH_MSG" + else + commit_message HEAD > "$FIXUP_MSG" || die "Cannot write $FIXUP_MSG" + COUNT=2 + { + echo "# This is a combination of 2 commits." + echo "# The first commit's message is:" + echo + cat "$FIXUP_MSG" + } >"$SQUASH_MSG" + fi + case $1 in + squash) + rm -f "$FIXUP_MSG" + echo + echo "# This is the $(nth_string $COUNT) commit message:" + echo + commit_message $2 + ;; + fixup) + echo + echo "# The $(nth_string $COUNT) commit message will be skipped:" + echo + commit_message $2 | sed -e 's/^/# /' + ;; + esac >>"$SQUASH_MSG" +} + +peek_next_command () { + sed -n -e "/^#/d" -e '/^$/d' -e "s/ .*//p" -e "q" < "$TODO" +} + +# A squash/fixup has failed. Prepare the long version of the squash +# commit message, then die_with_patch. This code path requires the +# user to edit the combined commit message for all commits that have +# been squashed/fixedup so far. So also erase the old squash +# messages, effectively causing the combined commit to be used as the +# new basis for any further squash/fixups. 
Args: sha1 rest +die_failed_squash() { + mv "$SQUASH_MSG" "$MSG" || exit + rm -f "$FIXUP_MSG" + cp "$MSG" "$GIT_DIR"/MERGE_MSG || exit + warn + warn "Could not apply $1... $2" + die_with_patch $1 "" +} + +flush_rewritten_pending() { + test -s "$REWRITTEN_PENDING" || return + newsha1="$(git rev-parse HEAD^0)" + sed "s/$/ $newsha1/" < "$REWRITTEN_PENDING" >> "$REWRITTEN_LIST" + rm -f "$REWRITTEN_PENDING" +} + +record_in_rewritten() { + oldsha1="$(git rev-parse $1)" + echo "$oldsha1" >> "$REWRITTEN_PENDING" + + case "$(peek_next_command)" in + squash|s|fixup|f) + ;; + *) + flush_rewritten_pending + ;; + esac +} + +do_next () { + rm -f "$MSG" "$AUTHOR_SCRIPT" "$AMEND" || exit + read -r command sha1 rest < "$TODO" + case "$command" in + '#'*|''|noop) + mark_action_done + ;; + pick|p) + comment_for_reflog pick + + mark_action_done + pick_one $sha1 || + die_with_patch $sha1 "Could not apply $sha1... $rest" + record_in_rewritten $sha1 + ;; + reword|r) + comment_for_reflog reword + + mark_action_done + pick_one $sha1 || + die_with_patch $sha1 "Could not apply $sha1... $rest" + git commit --amend --no-post-rewrite + record_in_rewritten $sha1 + ;; + edit|e) + comment_for_reflog edit + + mark_action_done + pick_one $sha1 || + die_with_patch $sha1 "Could not apply $sha1... $rest" + echo "$sha1" > "$DOTEST"/stopped-sha + make_patch $sha1 + git rev-parse --verify HEAD > "$AMEND" + warn "Stopped at $sha1... $rest" + warn "You can amend the commit now, with" + warn + warn " git commit --amend" + warn + warn "Once you are satisfied with your changes, run" + warn + warn " git rebase --continue" + warn + exit 0 + ;; + squash|s|fixup|f) + case "$command" in + squash|s) + squash_style=squash + ;; + fixup|f) + squash_style=fixup + ;; + esac + comment_for_reflog $squash_style + + test -f "$DONE" && has_action "$DONE" || + die "Cannot '$squash_style' without a previous commit" + + mark_action_done + update_squash_messages $squash_style $sha1 + author_script=$(get_author_ident_from_commit HEAD) + echo "$author_script" > "$AUTHOR_SCRIPT" + eval "$author_script" + output git reset --soft HEAD^ + pick_one -n $sha1 || die_failed_squash $sha1 "$rest" + case "$(peek_next_command)" in + squash|s|fixup|f) + # This is an intermediate commit; its message will only be + # used in case of trouble. So use the long version: + do_with_author output git commit --no-verify -F "$SQUASH_MSG" || + die_failed_squash $sha1 "$rest" + ;; + *) + # This is the final command of this squash/fixup group + if test -f "$FIXUP_MSG" + then + do_with_author git commit --no-verify -F "$FIXUP_MSG" || + die_failed_squash $sha1 "$rest" + else + cp "$SQUASH_MSG" "$GIT_DIR"/SQUASH_MSG || exit + rm -f "$GIT_DIR"/MERGE_MSG + do_with_author git commit --no-verify -e || + die_failed_squash $sha1 "$rest" + fi + rm -f "$SQUASH_MSG" "$FIXUP_MSG" + ;; + esac + record_in_rewritten $sha1 + ;; + x|"exec") + read -r command rest < "$TODO" + mark_action_done + printf 'Executing: %s\n' "$rest" + # "exec" command doesn't take a sha1 in the todo-list. + # => can't just use $sha1 here. + git rev-parse --verify HEAD > "$DOTEST"/stopped-sha + ${SHELL:-/bin/sh} -c "$rest" # Actual execution + status=$? + if test "$status" -ne 0 + then + warn "Execution failed: $rest" + warn "You can fix the problem, and then run" + warn + warn " git rebase --continue" + warn + exit "$status" + fi + # Run in subshell because require_clean_work_tree can die. + if ! 
(require_clean_work_tree "rebase") + then + warn "Commit or stash your changes, and then run" + warn + warn " git rebase --continue" + warn + exit 1 + fi + ;; + *) + warn "Unknown command: $command $sha1 $rest" + if git rev-parse --verify -q "$sha1" >/dev/null + then + die_with_patch $sha1 "Please fix this in the file $TODO." + else + die "Please fix this in the file $TODO." + fi + ;; + esac + test -s "$TODO" && return + + comment_for_reflog finish && + HEADNAME=$(cat "$DOTEST"/head-name) && + OLDHEAD=$(cat "$DOTEST"/head) && + SHORTONTO=$(git rev-parse --short $(cat "$DOTEST"/onto)) && + NEWHEAD=$(git rev-parse HEAD) && + case $HEADNAME in + refs/*) + message="$GIT_REFLOG_ACTION: $HEADNAME onto $SHORTONTO" && + git update-ref -m "$message" $HEADNAME $NEWHEAD $OLDHEAD && + git symbolic-ref HEAD $HEADNAME + ;; + esac && { + test ! -f "$DOTEST"/verbose || + git diff-tree --stat $(cat "$DOTEST"/head)..HEAD + } && + { + test -s "$REWRITTEN_LIST" && + git notes copy --for-rewrite=rebase < "$REWRITTEN_LIST" || + true # we don't care if this copying failed + } && + if test -x "$GIT_DIR"/hooks/post-rewrite && + test -s "$REWRITTEN_LIST"; then + "$GIT_DIR"/hooks/post-rewrite rebase < "$REWRITTEN_LIST" + true # we don't care if this hook failed + fi && + rm -rf "$DOTEST" && + git gc --auto && + warn "Successfully rebased and updated $HEADNAME." + + exit +} + +do_rest () { + while : + do + do_next + done +} + +# skip picking commits whose parents are unchanged +skip_unnecessary_picks () { + fd=3 + while read -r command rest + do + # fd=3 means we skip the command + case "$fd,$command" in + 3,pick|3,p) + # pick a commit whose parent is current $ONTO -> skip + sha1=${rest%% *} + case "$(git rev-parse --verify --quiet "$sha1"^)" in + "$ONTO"*) + ONTO=$sha1 + ;; + *) + fd=1 + ;; + esac + ;; + 3,#*|3,) + # copy comments + ;; + *) + fd=1 + ;; + esac + printf '%s\n' "$command${rest:+ }$rest" >&$fd + done <"$TODO" >"$TODO.new" 3>>"$DONE" && + mv -f "$TODO".new "$TODO" && + case "$(peek_next_command)" in + squash|s|fixup|f) + record_in_rewritten "$ONTO" + ;; + esac || + die "Could not skip unnecessary pick commands" +} + +# check if no other options are set +is_standalone () { + test $# -eq 2 -a "$2" = '--' && + test -z "$ONTO" && + test -z "$PRESERVE_MERGES" && + test -z "$STRATEGY" && + test -z "$VERBOSE" +} + +get_saved_options () { + test -d "$REWRITTEN" && PRESERVE_MERGES=t + test -f "$DOTEST"/strategy && STRATEGY="$(cat "$DOTEST"/strategy)" + test -f "$DOTEST"/verbose && VERBOSE=t + test -f "$DOTEST"/rebase-root && REBASE_ROOT=t +} + +# Rearrange the todo list that has both "pick sha1 msg" and +# "pick sha1 fixup!/squash! msg" appears in it so that the latter +# comes immediately after the former, and change "pick" to +# "fixup"/"squash". +rearrange_squash () { + # extract fixup!/squash! lines and resolve any referenced sha1's + while read -r pick sha1 message + do + case "$message" in + "squash! "*|"fixup! "*) + action="${message%%!*}" + rest="${message#*! }" + echo "$sha1 $action $rest" + # if it's a single word, try to resolve to a full sha1 and + # emit a second copy. 
This allows us to match on both message + # and on sha1 prefix + if test "${rest#* }" = "$rest"; then + fullsha="$(git rev-parse -q --verify "$rest" 2>/dev/null)" + if test -n "$fullsha"; then + # prefix the action to uniquely identify this line as + # intended for full sha1 match + echo "$sha1 +$action $fullsha" + fi + fi + esac + done >"$1.sq" <"$1" + test -s "$1.sq" || return + + used= + while read -r pick sha1 message + do + case " $used" in + *" $sha1 "*) continue ;; + esac + printf '%s\n' "$pick $sha1 $message" + used="$used$sha1 " + while read -r squash action msg + do + case " $used" in + *" $squash "*) continue ;; + esac + emit=0 + case "$action" in + +*) + action="${action#+}" + # full sha1 prefix test + case "$msg" in "$sha1"*) emit=1;; esac ;; + *) + # message prefix test + case "$message" in "$msg"*) emit=1;; esac ;; + esac + if test $emit = 1; then + printf '%s\n' "$action $squash $action! $msg" + used="$used$squash " + fi + done <"$1.sq" + done >"$1.rearranged" <"$1" + cat "$1.rearranged" >"$1" + rm -f "$1.sq" "$1.rearranged" +} + +LF=' +' +parse_onto () { + case "$1" in + *...*) + if left=${1%...*} right=${1#*...} && + onto=$(git merge-base --all ${left:-HEAD} ${right:-HEAD}) + then + case "$onto" in + ?*"$LF"?* | '') + exit 1 ;; + esac + echo "$onto" + exit 0 + fi + esac + git rev-parse --verify "$1^0" +} + +while test $# != 0 +do + case "$1" in + --no-verify) + OK_TO_SKIP_PRE_REBASE=yes + ;; + --verify) + OK_TO_SKIP_PRE_REBASE= + ;; + --continue) + is_standalone "$@" || usage + get_saved_options + comment_for_reflog continue + + test -d "$DOTEST" || die "No interactive rebase running" + + # Sanity check + git rev-parse --verify HEAD >/dev/null || + die "Cannot read HEAD" + git update-index --ignore-submodules --refresh && + git diff-files --quiet --ignore-submodules || + die "Working tree is dirty" + + # do we have anything to commit? + if git diff-index --cached --quiet --ignore-submodules HEAD -- + then + : Nothing to commit -- skip this + else + . "$AUTHOR_SCRIPT" || + die "Cannot find the author identity" + amend= + if test -f "$AMEND" + then + amend=$(git rev-parse --verify HEAD) + test "$amend" = $(cat "$AMEND") || + die "\ +You have uncommitted changes in your working tree. Please, commit them +first and then run 'git rebase --continue' again." + git reset --soft HEAD^ || + die "Cannot rewind the HEAD" + fi + do_with_author git commit --no-verify -F "$MSG" -e || { + test -n "$amend" && git reset --soft $amend + die "Could not commit staged changes." 
+ } + fi + + record_in_rewritten "$(cat "$DOTEST"/stopped-sha)" + + require_clean_work_tree "rebase" + do_rest + ;; + --abort) + is_standalone "$@" || usage + get_saved_options + comment_for_reflog abort + + git rerere clear + test -d "$DOTEST" || die "No interactive rebase running" + + HEADNAME=$(cat "$DOTEST"/head-name) + HEAD=$(cat "$DOTEST"/head) + case $HEADNAME in + refs/*) + git symbolic-ref HEAD $HEADNAME + ;; + esac && + output git reset --hard $HEAD && + rm -rf "$DOTEST" + exit + ;; + --skip) + is_standalone "$@" || usage + get_saved_options + comment_for_reflog skip + + git rerere clear + test -d "$DOTEST" || die "No interactive rebase running" + + output git reset --hard && do_rest + ;; + -s) + case "$#,$1" in + *,*=*) + STRATEGY="-s "$(expr "z$1" : 'z-[^=]*=\(.*\)') ;; + 1,*) + usage ;; + *) + STRATEGY="-s $2" + shift ;; + esac + ;; + -m) + # we use merge anyway + ;; + -v) + VERBOSE=t + ;; + -p) + PRESERVE_MERGES=t + ;; + -i) + # yeah, we know + ;; + --no-ff) + NEVER_FF=t + ;; + --root) + REBASE_ROOT=t + ;; + --autosquash) + AUTOSQUASH=t + ;; + --no-autosquash) + AUTOSQUASH= + ;; + --onto) + shift + ONTO=$(parse_onto "$1") || + die "Does not point to a valid commit: $1" + ;; + --) + shift + test -z "$REBASE_ROOT" -a $# -ge 1 -a $# -le 2 || + test ! -z "$REBASE_ROOT" -a $# -le 1 || usage + test -d "$DOTEST" && + die "Interactive rebase already started" + + git var GIT_COMMITTER_IDENT >/dev/null || + die "You need to set your committer info first" + + if test -z "$REBASE_ROOT" + then + UPSTREAM_ARG="$1" + UPSTREAM=$(git rev-parse --verify "$1") || die "Invalid base" + test -z "$ONTO" && ONTO=$UPSTREAM + shift + else + UPSTREAM= + UPSTREAM_ARG=--root + test -z "$ONTO" && + die "You must specify --onto when using --root" + fi + run_pre_rebase_hook "$UPSTREAM_ARG" "$@" + + comment_for_reflog start + + require_clean_work_tree "rebase" "Please commit or stash them." + + if test ! -z "$1" + then + output git checkout "$1" -- || + die "Could not checkout $1" + fi + + HEAD=$(git rev-parse --verify HEAD) || die "No HEAD?" + mkdir "$DOTEST" || die "Could not create temporary $DOTEST" + + : > "$DOTEST"/interactive || die "Could not mark as interactive" + git symbolic-ref HEAD > "$DOTEST"/head-name 2> /dev/null || + echo "detached HEAD" > "$DOTEST"/head-name + + echo $HEAD > "$DOTEST"/head + case "$REBASE_ROOT" in + '') + rm -f "$DOTEST"/rebase-root ;; + *) + : >"$DOTEST"/rebase-root ;; + esac + echo $ONTO > "$DOTEST"/onto + test -z "$STRATEGY" || echo "$STRATEGY" > "$DOTEST"/strategy + test t = "$VERBOSE" && : > "$DOTEST"/verbose + if test t = "$PRESERVE_MERGES" + then + if test -z "$REBASE_ROOT" + then + mkdir "$REWRITTEN" && + for c in $(git merge-base --all $HEAD $UPSTREAM) + do + echo $ONTO > "$REWRITTEN"/$c || + die "Could not init rewritten commits" + done + else + mkdir "$REWRITTEN" && + echo $ONTO > "$REWRITTEN"/root || + die "Could not init rewritten commits" + fi + # No cherry-pick because our first pass is to determine + # parents to rewrite and skipping dropped commits would + # prematurely end our probe + MERGES_OPTION= + first_after_upstream="$(git rev-list --reverse --first-parent $UPSTREAM..$HEAD | head -n 1)" + else + MERGES_OPTION="--no-merges --cherry-pick" + fi + + SHORTHEAD=$(git rev-parse --short $HEAD) + SHORTONTO=$(git rev-parse --short $ONTO) + if test -z "$REBASE_ROOT" + # this is now equivalent to ! 
-z "$UPSTREAM" + then + SHORTUPSTREAM=$(git rev-parse --short $UPSTREAM) + REVISIONS=$UPSTREAM...$HEAD + SHORTREVISIONS=$SHORTUPSTREAM..$SHORTHEAD + else + REVISIONS=$ONTO...$HEAD + SHORTREVISIONS=$SHORTHEAD + fi + git rev-list $MERGES_OPTION --pretty=oneline --abbrev-commit \ + --abbrev=7 --reverse --left-right --topo-order \ + $REVISIONS | \ + sed -n "s/^>//p" | + while read -r shortsha1 rest + do + if test t != "$PRESERVE_MERGES" + then + printf '%s\n' "pick $shortsha1 $rest" >> "$TODO" + else + sha1=$(git rev-parse $shortsha1) + if test -z "$REBASE_ROOT" + then + preserve=t + for p in $(git rev-list --parents -1 $sha1 | cut -d' ' -s -f2-) + do + if test -f "$REWRITTEN"/$p -a \( $p != $ONTO -o $sha1 = $first_after_upstream \) + then + preserve=f + fi + done + else + preserve=f + fi + if test f = "$preserve" + then + touch "$REWRITTEN"/$sha1 + printf '%s\n' "pick $shortsha1 $rest" >> "$TODO" + fi + fi + done + + # Watch for commits that been dropped by --cherry-pick + if test t = "$PRESERVE_MERGES" + then + mkdir "$DROPPED" + # Save all non-cherry-picked changes + git rev-list $REVISIONS --left-right --cherry-pick | \ + sed -n "s/^>//p" > "$DOTEST"/not-cherry-picks + # Now all commits and note which ones are missing in + # not-cherry-picks and hence being dropped + git rev-list $REVISIONS | + while read rev + do + if test -f "$REWRITTEN"/$rev -a "$(sane_grep "$rev" "$DOTEST"/not-cherry-picks)" = "" + then + # Use -f2 because if rev-list is telling us this commit is + # not worthwhile, we don't want to track its multiple heads, + # just the history of its first-parent for others that will + # be rebasing on top of it + git rev-list --parents -1 $rev | cut -d' ' -s -f2 > "$DROPPED"/$rev + short=$(git rev-list -1 --abbrev-commit --abbrev=7 $rev) + sane_grep -v "^[a-z][a-z]* $short" <"$TODO" > "${TODO}2" ; mv "${TODO}2" "$TODO" + rm "$REWRITTEN"/$rev + fi + done + fi + + test -s "$TODO" || echo noop >> "$TODO" + test -n "$AUTOSQUASH" && rearrange_squash "$TODO" + cat >> "$TODO" << EOF + +# Rebase $SHORTREVISIONS onto $SHORTONTO +# +# Commands: +# p, pick = use commit +# r, reword = use commit, but edit the commit message +# e, edit = use commit, but stop for amending +# s, squash = use commit, but meld into previous commit +# f, fixup = like "squash", but discard this commit's log message +# x, exec = run command (the rest of the line) using shell +# +# If you remove a line here THAT COMMIT WILL BE LOST. +# However, if you remove everything, the rebase will be aborted. 
+# +EOF + + has_action "$TODO" || + die_abort "Nothing to do" + + cp "$TODO" "$TODO".backup + git_editor "$TODO" || + die_abort "Could not execute editor" + + has_action "$TODO" || + die_abort "Nothing to do" + + test -d "$REWRITTEN" || test -n "$NEVER_FF" || skip_unnecessary_picks + + output git checkout $ONTO || die_abort "could not detach HEAD" + git update-ref ORIG_HEAD $HEAD + do_rest + ;; + esac + shift +done diff --git a/SparkleShare/Mac/git/libexec/git-core/git-receive-pack b/SparkleShare/Mac/git/libexec/git-core/git-receive-pack new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-receive-pack @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-reflog b/SparkleShare/Mac/git/libexec/git-core/git-reflog new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-reflog @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-relink b/SparkleShare/Mac/git/libexec/git-core/git-relink new file mode 100755 index 00000000..20cdea39 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-relink @@ -0,0 +1,174 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); +# Copyright 2005, Ryan Anderson +# Distribution permitted under the GPL v2, as distributed +# by the Free Software Foundation. +# Later versions of the GPL at the discretion of Linus Torvalds +# +# Scan two git object-trees, and hardlink any common objects between them. + +use 5.008; +use strict; +use warnings; +use Getopt::Long; + +sub get_canonical_form($); +sub do_scan_directory($$$); +sub compare_two_files($$); +sub usage(); +sub link_two_files($$); + +# stats +my $total_linked = 0; +my $total_already = 0; +my ($linked,$already); + +my $fail_on_different_sizes = 0; +my $help = 0; +GetOptions("safe" => \$fail_on_different_sizes, + "help" => \$help); + +usage() if $help; + +my (@dirs) = @ARGV; + +usage() if (!defined $dirs[0] || !defined $dirs[1]); + +$_ = get_canonical_form($_) foreach (@dirs); + +my $master_dir = pop @dirs; + +opendir(D,$master_dir . "objects/") + or die "Failed to open $master_dir/objects/ : $!"; + +my @hashdirs = grep { ($_ eq 'pack') || /^[0-9a-f]{2}$/ } readdir(D); + +foreach my $repo (@dirs) { + $linked = 0; + $already = 0; + printf("Searching '%s' and '%s' for common objects and hardlinking them...\n", + $master_dir,$repo); + + foreach my $hashdir (@hashdirs) { + do_scan_directory($master_dir, $hashdir, $repo); + } + + printf("Linked %d files, %d were already linked.\n",$linked, $already); + + $total_linked += $linked; + $total_already += $already; +} + +printf("Totals: Linked %d files, %d were already linked.\n", + $total_linked, $total_already); + + +sub do_scan_directory($$$) { + my ($srcdir, $subdir, $dstdir) = @_; + + my $sfulldir = sprintf("%sobjects/%s/",$srcdir,$subdir); + my $dfulldir = sprintf("%sobjects/%s/",$dstdir,$subdir); + + opendir(S,$sfulldir) + or die "Failed to opendir $sfulldir: $!"; + + foreach my $file (grep(!/\.{1,2}$/, readdir(S))) { + my $sfilename = $sfulldir . $file; + my $dfilename = $dfulldir . 
$file; + + compare_two_files($sfilename,$dfilename); + + } + closedir(S); +} + +sub compare_two_files($$) { + my ($sfilename, $dfilename) = @_; + + # Perl's stat returns relevant information as follows: + # 0 = dev number + # 1 = inode number + # 7 = size + my @sstatinfo = stat($sfilename); + my @dstatinfo = stat($dfilename); + + if (@sstatinfo == 0 && @dstatinfo == 0) { + die sprintf("Stat of both %s and %s failed: %s\n",$sfilename, $dfilename, $!); + + } elsif (@dstatinfo == 0) { + return; + } + + if ( ($sstatinfo[0] == $dstatinfo[0]) && + ($sstatinfo[1] != $dstatinfo[1])) { + if ($sstatinfo[7] == $dstatinfo[7]) { + link_two_files($sfilename, $dfilename); + + } else { + my $err = sprintf("ERROR: File sizes are not the same, cannot relink %s to %s.\n", + $sfilename, $dfilename); + if ($fail_on_different_sizes) { + die $err; + } else { + warn $err; + } + } + + } elsif ( ($sstatinfo[0] == $dstatinfo[0]) && + ($sstatinfo[1] == $dstatinfo[1])) { + $already++; + } +} + +sub get_canonical_form($) { + my $dir = shift; + my $original = $dir; + + die "$dir is not a directory." unless -d $dir; + + $dir .= "/" unless $dir =~ m#/$#; + $dir .= ".git/" unless $dir =~ m#\.git/$#; + + die "$original does not have a .git/ subdirectory.\n" unless -d $dir; + + return $dir; +} + +sub link_two_files($$) { + my ($sfilename, $dfilename) = @_; + my $tmpdname = sprintf("%s.old",$dfilename); + rename($dfilename,$tmpdname) + or die sprintf("Failure renaming %s to %s: %s", + $dfilename, $tmpdname, $!); + + if (! link($sfilename,$dfilename)) { + my $failtxt = ""; + unless (rename($tmpdname,$dfilename)) { + $failtxt = sprintf( + "Git Repository containing %s is probably corrupted, " . + "please copy '%s' to '%s' to fix.\n", + $tmpdname, $dfilename); + } + + die sprintf("Failed to link %s to %s: %s\n%s" . + $sfilename, $dfilename, + $!, $dfilename, $failtxt); + } + + unlink($tmpdname) + or die sprintf("Unlink of %s failed: %s\n", + $dfilename, $!); + + $linked++; +} + + +sub usage() { + print("Usage: git relink [--safe] ... \n"); + print("All directories should contain a .git/objects/ subdirectory.\n"); + print("Options\n"); + print("\t--safe\t" . + "Stops if two objects with the same hash exist but " . + "have different sizes. 
Default is to warn and continue.\n"); + exit(1); +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote b/SparkleShare/Mac/git/libexec/git-core/git-remote new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-remote @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-ext b/SparkleShare/Mac/git/libexec/git-core/git-remote-ext new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-remote-ext @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-fd b/SparkleShare/Mac/git/libexec/git-core/git-remote-fd new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-remote-fd @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-ftp b/SparkleShare/Mac/git/libexec/git-core/git-remote-ftp new file mode 100755 index 00000000..3f1687eb Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-remote-ftp differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-ftps b/SparkleShare/Mac/git/libexec/git-core/git-remote-ftps new file mode 100755 index 00000000..3f1687eb Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-remote-ftps differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-http b/SparkleShare/Mac/git/libexec/git-core/git-remote-http new file mode 100755 index 00000000..3f1687eb Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-remote-http differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-https b/SparkleShare/Mac/git/libexec/git-core/git-remote-https new file mode 100755 index 00000000..3f1687eb Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-remote-https differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-remote-testgit b/SparkleShare/Mac/git/libexec/git-core/git-remote-testgit new file mode 100755 index 00000000..40cea61a --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-remote-testgit @@ -0,0 +1,244 @@ +#!/usr/bin/python + +# hashlib is only available in python >= 2.5 +try: + import hashlib + _digest = hashlib.sha1 +except ImportError: + import sha + _digest = sha.new +import sys +import os +sys.path.insert(0, os.getenv("GITPYTHONLIB","/usr/local/git/lib/python2.6/site-packages")) + +from git_remote_helpers.util import die, debug, warn +from git_remote_helpers.git.repo import GitRepo +from git_remote_helpers.git.exporter import GitExporter +from git_remote_helpers.git.importer import GitImporter +from git_remote_helpers.git.non_local import NonLocalGit + +def get_repo(alias, url): + """Returns a git repository object initialized for usage. + """ + + repo = GitRepo(url) + repo.get_revs() + repo.get_head() + + hasher = _digest() + hasher.update(repo.path) + repo.hash = hasher.hexdigest() + + repo.get_base_path = lambda base: os.path.join( + base, 'info', 'fast-import', repo.hash) + + prefix = 'refs/testgit/%s/' % alias + debug("prefix: '%s'", prefix) + + repo.gitdir = "" + repo.alias = alias + repo.prefix = prefix + + repo.exporter = GitExporter(repo) + repo.importer = GitImporter(repo) + repo.non_local = NonLocalGit(repo) + + return repo + + +def local_repo(repo, path): + """Returns a git repository object initalized for usage. 
+ """ + + local = GitRepo(path) + + local.non_local = None + local.gitdir = repo.gitdir + local.alias = repo.alias + local.prefix = repo.prefix + local.hash = repo.hash + local.get_base_path = repo.get_base_path + local.exporter = GitExporter(local) + local.importer = GitImporter(local) + + return local + + +def do_capabilities(repo, args): + """Prints the supported capabilities. + """ + + print "import" + print "export" + print "gitdir" + print "refspec refs/heads/*:%s*" % repo.prefix + + print # end capabilities + + +def do_list(repo, args): + """Lists all known references. + + Bug: This will always set the remote head to master for non-local + repositories, since we have no way of determining what the remote + head is at clone time. + """ + + for ref in repo.revs: + debug("? refs/heads/%s", ref) + print "? refs/heads/%s" % ref + + if repo.head: + debug("@refs/heads/%s HEAD" % repo.head) + print "@refs/heads/%s HEAD" % repo.head + else: + debug("@refs/heads/master HEAD") + print "@refs/heads/master HEAD" + + print # end list + + +def update_local_repo(repo): + """Updates (or clones) a local repo. + """ + + if repo.local: + return repo + + path = repo.non_local.clone(repo.gitdir) + repo.non_local.update(repo.gitdir) + repo = local_repo(repo, path) + return repo + + +def do_import(repo, args): + """Exports a fast-import stream from testgit for git to import. + """ + + if len(args) != 1: + die("Import needs exactly one ref") + + if not repo.gitdir: + die("Need gitdir to import") + + repo = update_local_repo(repo) + repo.exporter.export_repo(repo.gitdir) + + +def do_export(repo, args): + """Imports a fast-import stream from git to testgit. + """ + + if not repo.gitdir: + die("Need gitdir to export") + + dirname = repo.get_base_path(repo.gitdir) + + if not os.path.exists(dirname): + os.makedirs(dirname) + + path = os.path.join(dirname, 'testgit.marks') + print path + if os.path.exists(path): + print path + else: + print "" + sys.stdout.flush() + + update_local_repo(repo) + repo.importer.do_import(repo.gitdir) + repo.non_local.push(repo.gitdir) + + +def do_gitdir(repo, args): + """Stores the location of the gitdir. + """ + + if not args: + die("gitdir needs an argument") + + repo.gitdir = ' '.join(args) + + +COMMANDS = { + 'capabilities': do_capabilities, + 'list': do_list, + 'import': do_import, + 'export': do_export, + 'gitdir': do_gitdir, +} + + +def sanitize(value): + """Cleans up the url. + """ + + if value.startswith('testgit::'): + value = value[9:] + + return value + + +def read_one_line(repo): + """Reads and processes one command. + """ + + line = sys.stdin.readline() + + cmdline = line + + if not cmdline: + warn("Unexpected EOF") + return False + + cmdline = cmdline.strip().split() + if not cmdline: + # Blank line means we're about to quit + return False + + cmd = cmdline.pop(0) + debug("Got command '%s' with args '%s'", cmd, ' '.join(cmdline)) + + if cmd not in COMMANDS: + die("Unknown command, %s", cmd) + + func = COMMANDS[cmd] + func(repo, cmdline) + sys.stdout.flush() + + return True + + +def main(args): + """Starts a new remote helper for the specified repository. 
+ """ + + if len(args) != 3: + die("Expecting exactly three arguments.") + sys.exit(1) + + if os.getenv("GIT_DEBUG_TESTGIT"): + import git_remote_helpers.util + git_remote_helpers.util.DEBUG = True + + alias = sanitize(args[1]) + url = sanitize(args[2]) + + if not alias.isalnum(): + warn("non-alnum alias '%s'", alias) + alias = "tmp" + + args[1] = alias + args[2] = url + + repo = get_repo(alias, url) + + debug("Got arguments %s", args[1:]) + + more = True + + while (more): + more = read_one_line(repo) + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/SparkleShare/Mac/git/libexec/git-core/git-repack b/SparkleShare/Mac/git/libexec/git-core/git-repack new file mode 100755 index 00000000..624feec2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-repack @@ -0,0 +1,186 @@ +#!/bin/sh +# +# Copyright (c) 2005 Linus Torvalds +# + +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC="\ +git repack [options] +-- +a pack everything in a single pack +A same as -a, and turn unreachable objects loose +d remove redundant packs, and run git-prune-packed +f pass --no-reuse-delta to git-pack-objects +F pass --no-reuse-object to git-pack-objects +n do not run git-update-server-info +q,quiet be quiet +l pass --local to git-pack-objects + Packing constraints +window= size of the window used for delta compression +window-memory= same as the above, but limit memory size instead of entries count +depth= limits the maximum delta depth +max-pack-size= maximum size of each packfile +" +SUBDIRECTORY_OK='Yes' +. git-sh-setup + +no_update_info= all_into_one= remove_redundant= unpack_unreachable= +local= no_reuse= extra= +while test $# != 0 +do + case "$1" in + -n) no_update_info=t ;; + -a) all_into_one=t ;; + -A) all_into_one=t + unpack_unreachable=--unpack-unreachable ;; + -d) remove_redundant=t ;; + -q) GIT_QUIET=t ;; + -f) no_reuse=--no-reuse-delta ;; + -F) no_reuse=--no-reuse-object ;; + -l) local=--local ;; + --max-pack-size|--window|--window-memory|--depth) + extra="$extra $1=$2"; shift ;; + --) shift; break;; + *) usage ;; + esac + shift +done + +case "`git config --bool repack.usedeltabaseoffset || echo true`" in +true) + extra="$extra --delta-base-offset" ;; +esac + +PACKDIR="$GIT_OBJECT_DIRECTORY/pack" +PACKTMP="$PACKDIR/.tmp-$$-pack" +rm -f "$PACKTMP"-* +trap 'rm -f "$PACKTMP"-*' 0 1 2 3 15 + +# There will be more repacking strategies to come... +case ",$all_into_one," in +,,) + args='--unpacked --incremental' + ;; +,t,) + args= existing= + if [ -d "$PACKDIR" ]; then + for e in `cd "$PACKDIR" && find . -type f -name '*.pack' \ + | sed -e 's/^\.\///' -e 's/\.pack$//'` + do + if [ -e "$PACKDIR/$e.keep" ]; then + : keep + else + existing="$existing $e" + fi + done + if test -n "$existing" -a -n "$unpack_unreachable" -a \ + -n "$remove_redundant" + then + args="$args $unpack_unreachable" + fi + fi + ;; +esac + +mkdir -p "$PACKDIR" || exit + +args="$args $local ${GIT_QUIET:+-q} $no_reuse$extra" +names=$(git pack-objects --keep-true-parents --honor-pack-keep --non-empty --all --reflog $args &2 "WARNING: Some packs in use have been renamed by" + echo >&2 "WARNING: prefixing old- to their name, in order to" + echo >&2 "WARNING: replace them with the new version of the" + echo >&2 "WARNING: file. But the operation failed, and" + echo >&2 "WARNING: attempt to rename them back to their" + echo >&2 "WARNING: original names also failed." 
+ echo >&2 "WARNING: Please rename them in $PACKDIR manually:" + for file in $rollback_failure + do + echo >&2 "WARNING: old-$file -> $file" + done + fi + exit 1 +fi + +# Now the ones with the same name are out of the way... +fullbases= +for name in $names +do + fullbases="$fullbases pack-$name" + chmod a-w "$PACKTMP-$name.pack" + chmod a-w "$PACKTMP-$name.idx" + mv -f "$PACKTMP-$name.pack" "$PACKDIR/pack-$name.pack" && + mv -f "$PACKTMP-$name.idx" "$PACKDIR/pack-$name.idx" || + exit +done + +# Remove the "old-" files +for name in $names +do + rm -f "$PACKDIR/old-pack-$name.idx" + rm -f "$PACKDIR/old-pack-$name.pack" +done + +# End of pack replacement. + +if test "$remove_redundant" = t +then + # We know $existing are all redundant. + if [ -n "$existing" ] + then + ( cd "$PACKDIR" && + for e in $existing + do + case " $fullbases " in + *" $e "*) ;; + *) rm -f "$e.pack" "$e.idx" "$e.keep" ;; + esac + done + ) + fi + git prune-packed ${GIT_QUIET:+-q} +fi + +case "$no_update_info" in +t) : ;; +*) git update-server-info ;; +esac diff --git a/SparkleShare/Mac/git/libexec/git-core/git-replace b/SparkleShare/Mac/git/libexec/git-core/git-replace new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-replace @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-repo-config b/SparkleShare/Mac/git/libexec/git-core/git-repo-config new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-repo-config @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-request-pull b/SparkleShare/Mac/git/libexec/git-core/git-request-pull new file mode 100755 index 00000000..6fdea397 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-request-pull @@ -0,0 +1,79 @@ +#!/bin/sh +# Copyright 2005, Ryan Anderson +# +# This file is licensed under the GPL v2, or a later version +# at the discretion of Linus Torvalds. + +USAGE=' []' +LONG_USAGE='Summarizes the changes between two commits to the standard output, +and includes the given URL in the generated summary.' +SUBDIRECTORY_OK='Yes' +OPTIONS_KEEPDASHDASH= +OPTIONS_SPEC='git request-pull [options] start url [end] +-- +p show patch text as well +' + +. git-sh-setup +. git-parse-remote + +GIT_PAGER= +export GIT_PAGER + +patch= +while case "$#" in 0) break ;; esac +do + case "$1" in + -p) + patch=-p ;; + --) + shift; break ;; + -*) + usage ;; + *) + break ;; + esac + shift +done + +base=$1 +url=$2 +head=${3-HEAD} + +[ "$base" ] || usage +[ "$url" ] || usage + +baserev=`git rev-parse --verify "$base"^0` && +headrev=`git rev-parse --verify "$head"^0` || exit + +merge_base=`git merge-base $baserev $headrev` || +die "fatal: No commits in common between $base and $head" + +branch=$(git ls-remote "$url" \ + | sed -n -e "/^$headrev refs.heads./{ + s/^.* refs.heads.// + p + q + }") +url=$(get_remote_url "$url") +if [ -z "$branch" ]; then + echo "warn: No branch of $url is at:" >&2 + git log --max-count=1 --pretty='tformat:warn: %h: %s' $headrev >&2 + echo "warn: Are you sure you pushed $head there?" >&2 + echo >&2 + echo >&2 + branch=..BRANCH.NOT.VERIFIED.. 
+ status=1 +fi + +git show -s --format='The following changes since commit %H: + + %s (%ci) + +are available in the git repository at:' $baserev && +echo " $url $branch" && +echo && + +git shortlog ^$baserev $headrev && +git diff -M --stat --summary $patch $merge_base..$headrev || exit +exit $status diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rerere b/SparkleShare/Mac/git/libexec/git-core/git-rerere new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rerere @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-reset b/SparkleShare/Mac/git/libexec/git-core/git-reset new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-reset @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rev-list b/SparkleShare/Mac/git/libexec/git-core/git-rev-list new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rev-list @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rev-parse b/SparkleShare/Mac/git/libexec/git-core/git-rev-parse new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rev-parse @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-revert b/SparkleShare/Mac/git/libexec/git-core/git-revert new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-revert @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-rm b/SparkleShare/Mac/git/libexec/git-core/git-rm new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-rm @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-send-email b/SparkleShare/Mac/git/libexec/git-core/git-send-email new file mode 100755 index 00000000..01d5a74e --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-send-email @@ -0,0 +1,1417 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); +# +# Copyright 2002,2005 Greg Kroah-Hartman +# Copyright 2005 Ryan Anderson +# +# GPL v2 (See COPYING) +# +# Ported to support git "mbox" format files by Ryan Anderson +# +# Sends a collection of emails to the given email addresses, disturbingly fast. +# +# Supports two formats: +# 1. mbox format files (ignoring most headers and MIME formatting - this is designed for sending patches) +# 2. The original format support by Greg's script: +# first line of the message is who to CC, +# and second line is the subject of the message. 
+# + +use 5.008; +use strict; +use warnings; +use Term::ReadLine; +use Getopt::Long; +use Text::ParseWords; +use Data::Dumper; +use Term::ANSIColor; +use File::Temp qw/ tempdir tempfile /; +use File::Spec::Functions qw(catfile); +use Error qw(:try); +use Git; + +Getopt::Long::Configure qw/ pass_through /; + +package FakeTerm; +sub new { + my ($class, $reason) = @_; + return bless \$reason, shift; +} +sub readline { + my $self = shift; + die "Cannot use readline on FakeTerm: $$self"; +} +package main; + + +sub usage { + print < + + Composing: + --from * Email From: + --[no-]to * Email To: + --[no-]cc * Email Cc: + --[no-]bcc * Email Bcc: + --subject * Email "Subject:" + --in-reply-to * Email "In-Reply-To:" + --annotate * Review each patch that will be sent in an editor. + --compose * Open an editor for introduction. + --8bit-encoding * Encoding to assume 8bit mails if undeclared + + Sending: + --envelope-sender * Email envelope sender. + --smtp-server * Outgoing SMTP server to use. The port + is optional. Default 'localhost'. + --smtp-server-option * Outgoing SMTP server option to use. + --smtp-server-port * Outgoing SMTP server port. + --smtp-user * Username for SMTP-AUTH. + --smtp-pass * Password for SMTP-AUTH; not necessary. + --smtp-encryption * tls or ssl; anything else disables. + --smtp-ssl * Deprecated. Use '--smtp-encryption ssl'. + --smtp-domain * The domain name sent to HELO/EHLO handshake + --smtp-debug <0|1> * Disable, enable Net::SMTP debug. + + Automating: + --identity * Use the sendemail. options. + --to-cmd * Email To: via ` \$patch_path` + --cc-cmd * Email Cc: via ` \$patch_path` + --suppress-cc * author, self, sob, cc, cccmd, body, bodycc, all. + --[no-]signed-off-by-cc * Send to Signed-off-by: addresses. Default on. + --[no-]suppress-from * Send to self. Default off. + --[no-]chain-reply-to * Chain In-Reply-To: fields. Default off. + --[no-]thread * Use In-Reply-To: field. Default on. + + Administering: + --confirm * Confirm recipients before sending; + auto, cc, compose, always, or never. + --quiet * Output one line of info per email. + --dry-run * Don't actually send the emails. + --[no-]validate * Perform patch sanity checks. Default on. + --[no-]format-patch * understand any non optional arguments as + `git format-patch` ones. + --force * Send even if safety checks would prevent it. + +EOT + exit(1); +} + +# most mail servers generate the Date: header, but not all... +sub format_2822_time { + my ($time) = @_; + my @localtm = localtime($time); + my @gmttm = gmtime($time); + my $localmin = $localtm[1] + $localtm[2] * 60; + my $gmtmin = $gmttm[1] + $gmttm[2] * 60; + if ($localtm[0] != $gmttm[0]) { + die "local zone differs from GMT by a non-minute interval\n"; + } + if ((($gmttm[6] + 1) % 7) == $localtm[6]) { + $localmin += 1440; + } elsif ((($gmttm[6] - 1) % 7) == $localtm[6]) { + $localmin -= 1440; + } elsif ($gmttm[6] != $localtm[6]) { + die "local time offset greater than or equal to 24 hours\n"; + } + my $offset = $localmin - $gmtmin; + my $offhour = $offset / 60; + my $offmin = abs($offset % 60); + if (abs($offhour) >= 24) { + die ("local time offset greater than or equal to 24 hours\n"); + } + + return sprintf("%s, %2d %s %d %02d:%02d:%02d %s%02d%02d", + qw(Sun Mon Tue Wed Thu Fri Sat)[$localtm[6]], + $localtm[3], + qw(Jan Feb Mar Apr May Jun + Jul Aug Sep Oct Nov Dec)[$localtm[4]], + $localtm[5]+1900, + $localtm[2], + $localtm[1], + $localtm[0], + ($offset >= 0) ? 
'+' : '-', + abs($offhour), + $offmin, + ); +} + +my $have_email_valid = eval { require Email::Valid; 1 }; +my $have_mail_address = eval { require Mail::Address; 1 }; +my $smtp; +my $auth; + +# Variables we fill in automatically, or via prompting: +my (@to,$no_to,@initial_to,@cc,$no_cc,@initial_cc,@bcclist,$no_bcc,@xh, + $initial_reply_to,$initial_subject,@files, + $author,$sender,$smtp_authpass,$annotate,$compose,$time); + +my $envelope_sender; + +# Example reply to: +#$initial_reply_to = ''; #<20050203173208.GA23964@foobar.com>'; + +my $repo = eval { Git->repository() }; +my @repo = $repo ? ($repo) : (); +my $term = eval { + $ENV{"GIT_SEND_EMAIL_NOTTY"} + ? new Term::ReadLine 'git-send-email', \*STDIN, \*STDOUT + : new Term::ReadLine 'git-send-email'; +}; +if ($@) { + $term = new FakeTerm "$@: going non-interactive"; +} + +# Behavior modification variables +my ($quiet, $dry_run) = (0, 0); +my $format_patch; +my $compose_filename; +my $force = 0; + +# Handle interactive edition of files. +my $multiedit; +my $editor; + +sub do_edit { + if (!defined($editor)) { + $editor = Git::command_oneline('var', 'GIT_EDITOR'); + } + if (defined($multiedit) && !$multiedit) { + map { + system('sh', '-c', $editor.' "$@"', $editor, $_); + if (($? & 127) || ($? >> 8)) { + die("the editor exited uncleanly, aborting everything"); + } + } @_; + } else { + system('sh', '-c', $editor.' "$@"', $editor, @_); + if (($? & 127) || ($? >> 8)) { + die("the editor exited uncleanly, aborting everything"); + } + } +} + +# Variables with corresponding config settings +my ($thread, $chain_reply_to, $suppress_from, $signed_off_by_cc); +my ($to_cmd, $cc_cmd); +my ($smtp_server, $smtp_server_port, @smtp_server_options); +my ($smtp_authuser, $smtp_encryption); +my ($identity, $aliasfiletype, @alias_files, $smtp_domain); +my ($validate, $confirm); +my (@suppress_cc); +my ($auto_8bit_encoding); + +my ($debug_net_smtp) = 0; # Net::SMTP, see send_message() + +my $not_set_by_user = "true but not set by the user"; + +my %config_bool_settings = ( + "thread" => [\$thread, 1], + "chainreplyto" => [\$chain_reply_to, $not_set_by_user], + "suppressfrom" => [\$suppress_from, undef], + "signedoffbycc" => [\$signed_off_by_cc, undef], + "signedoffcc" => [\$signed_off_by_cc, undef], # Deprecated + "validate" => [\$validate, 1], +); + +my %config_settings = ( + "smtpserver" => \$smtp_server, + "smtpserverport" => \$smtp_server_port, + "smtpserveroption" => \@smtp_server_options, + "smtpuser" => \$smtp_authuser, + "smtppass" => \$smtp_authpass, + "smtpdomain" => \$smtp_domain, + "to" => \@initial_to, + "tocmd" => \$to_cmd, + "cc" => \@initial_cc, + "cccmd" => \$cc_cmd, + "aliasfiletype" => \$aliasfiletype, + "bcc" => \@bcclist, + "aliasesfile" => \@alias_files, + "suppresscc" => \@suppress_cc, + "envelopesender" => \$envelope_sender, + "multiedit" => \$multiedit, + "confirm" => \$confirm, + "from" => \$sender, + "assume8bitencoding" => \$auto_8bit_encoding, +); + +# Help users prepare for 1.7.0 +sub chain_reply_to { + if (defined $chain_reply_to && + $chain_reply_to eq $not_set_by_user) { + print STDERR + "In git 1.7.0, the default has changed to --no-chain-reply-to\n" . + "Set sendemail.chainreplyto configuration variable to true if\n" . 
+ "you want to keep --chain-reply-to as your default.\n"; + $chain_reply_to = 0; + } + return $chain_reply_to; +} + +# Handle Uncouth Termination +sub signal_handler { + + # Make text normal + print color("reset"), "\n"; + + # SMTP password masked + system "stty echo"; + + # tmp files from --compose + if (defined $compose_filename) { + if (-e $compose_filename) { + print "'$compose_filename' contains an intermediate version of the email you were composing.\n"; + } + if (-e ($compose_filename . ".final")) { + print "'$compose_filename.final' contains the composed email.\n" + } + } + + exit; +}; + +$SIG{TERM} = \&signal_handler; +$SIG{INT} = \&signal_handler; + +# Begin by accumulating all the variables (defined above), that we will end up +# needing, first, from the command line: + +my $rc = GetOptions("sender|from=s" => \$sender, + "in-reply-to=s" => \$initial_reply_to, + "subject=s" => \$initial_subject, + "to=s" => \@initial_to, + "to-cmd=s" => \$to_cmd, + "no-to" => \$no_to, + "cc=s" => \@initial_cc, + "no-cc" => \$no_cc, + "bcc=s" => \@bcclist, + "no-bcc" => \$no_bcc, + "chain-reply-to!" => \$chain_reply_to, + "smtp-server=s" => \$smtp_server, + "smtp-server-option=s" => \@smtp_server_options, + "smtp-server-port=s" => \$smtp_server_port, + "smtp-user=s" => \$smtp_authuser, + "smtp-pass:s" => \$smtp_authpass, + "smtp-ssl" => sub { $smtp_encryption = 'ssl' }, + "smtp-encryption=s" => \$smtp_encryption, + "smtp-debug:i" => \$debug_net_smtp, + "smtp-domain:s" => \$smtp_domain, + "identity=s" => \$identity, + "annotate" => \$annotate, + "compose" => \$compose, + "quiet" => \$quiet, + "cc-cmd=s" => \$cc_cmd, + "suppress-from!" => \$suppress_from, + "suppress-cc=s" => \@suppress_cc, + "signed-off-cc|signed-off-by-cc!" => \$signed_off_by_cc, + "confirm=s" => \$confirm, + "dry-run" => \$dry_run, + "envelope-sender=s" => \$envelope_sender, + "thread!" => \$thread, + "validate!" => \$validate, + "format-patch!" 
=> \$format_patch, + "8bit-encoding=s" => \$auto_8bit_encoding, + "force" => \$force, + ); + +unless ($rc) { + usage(); +} + +die "Cannot run git format-patch from outside a repository\n" + if $format_patch and not $repo; + +# Now, let's fill any that aren't set in with defaults: + +sub read_config { + my ($prefix) = @_; + + foreach my $setting (keys %config_bool_settings) { + my $target = $config_bool_settings{$setting}->[0]; + $$target = Git::config_bool(@repo, "$prefix.$setting") unless (defined $$target); + } + + foreach my $setting (keys %config_settings) { + my $target = $config_settings{$setting}; + next if $setting eq "to" and defined $no_to; + next if $setting eq "cc" and defined $no_cc; + next if $setting eq "bcc" and defined $no_bcc; + if (ref($target) eq "ARRAY") { + unless (@$target) { + my @values = Git::config(@repo, "$prefix.$setting"); + @$target = @values if (@values && defined $values[0]); + } + } + else { + $$target = Git::config(@repo, "$prefix.$setting") unless (defined $$target); + } + } + + if (!defined $smtp_encryption) { + my $enc = Git::config(@repo, "$prefix.smtpencryption"); + if (defined $enc) { + $smtp_encryption = $enc; + } elsif (Git::config_bool(@repo, "$prefix.smtpssl")) { + $smtp_encryption = 'ssl'; + } + } +} + +# read configuration from [sendemail "$identity"], fall back on [sendemail] +$identity = Git::config(@repo, "sendemail.identity") unless (defined $identity); +read_config("sendemail.$identity") if (defined $identity); +read_config("sendemail"); + +# fall back on builtin bool defaults +foreach my $setting (values %config_bool_settings) { + ${$setting->[0]} = $setting->[1] unless (defined (${$setting->[0]})); +} + +# 'default' encryption is none -- this only prevents a warning +$smtp_encryption = '' unless (defined $smtp_encryption); + +# Set CC suppressions +my(%suppress_cc); +if (@suppress_cc) { + foreach my $entry (@suppress_cc) { + die "Unknown --suppress-cc field: '$entry'\n" + unless $entry =~ /^(?:all|cccmd|cc|author|self|sob|body|bodycc)$/; + $suppress_cc{$entry} = 1; + } +} + +if ($suppress_cc{'all'}) { + foreach my $entry (qw (cccmd cc author self sob body bodycc)) { + $suppress_cc{$entry} = 1; + } + delete $suppress_cc{'all'}; +} + +# If explicit old-style ones are specified, they trump --suppress-cc. +$suppress_cc{'self'} = $suppress_from if defined $suppress_from; +$suppress_cc{'sob'} = !$signed_off_by_cc if defined $signed_off_by_cc; + +if ($suppress_cc{'body'}) { + foreach my $entry (qw (sob bodycc)) { + $suppress_cc{$entry} = 1; + } + delete $suppress_cc{'body'}; +} + +# Set confirm's default value +my $confirm_unconfigured = !defined $confirm; +if ($confirm_unconfigured) { + $confirm = scalar %suppress_cc ? 'compose' : 'auto'; +}; +die "Unknown --confirm setting: '$confirm'\n" + unless $confirm =~ /^(?:auto|cc|compose|always|never)/; + +# Debugging, print out the suppressions. 
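+# (This dump is disabled by default; flip the 0 below to 1 while debugging
+# --suppress-cc handling to print the computed %suppress_cc flags.)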
+if (0) { + print "suppressions:\n"; + foreach my $entry (keys %suppress_cc) { + printf " %-5s -> $suppress_cc{$entry}\n", $entry; + } +} + +my ($repoauthor, $repocommitter); +($repoauthor) = Git::ident_person(@repo, 'author'); +($repocommitter) = Git::ident_person(@repo, 'committer'); + +# Verify the user input + +foreach my $entry (@initial_to) { + die "Comma in --to entry: $entry'\n" unless $entry !~ m/,/; +} + +foreach my $entry (@initial_cc) { + die "Comma in --cc entry: $entry'\n" unless $entry !~ m/,/; +} + +foreach my $entry (@bcclist) { + die "Comma in --bcclist entry: $entry'\n" unless $entry !~ m/,/; +} + +sub parse_address_line { + if ($have_mail_address) { + return map { $_->format } Mail::Address->parse($_[0]); + } else { + return split_addrs($_[0]); + } +} + +sub split_addrs { + return quotewords('\s*,\s*', 1, @_); +} + +my %aliases; +my %parse_alias = ( + # multiline formats can be supported in the future + mutt => sub { my $fh = shift; while (<$fh>) { + if (/^\s*alias\s+(?:-group\s+\S+\s+)*(\S+)\s+(.*)$/) { + my ($alias, $addr) = ($1, $2); + $addr =~ s/#.*$//; # mutt allows # comments + # commas delimit multiple addresses + $aliases{$alias} = [ split_addrs($addr) ]; + }}}, + mailrc => sub { my $fh = shift; while (<$fh>) { + if (/^alias\s+(\S+)\s+(.*)$/) { + # spaces delimit multiple addresses + $aliases{$1} = [ quotewords('\s+', 0, $2) ]; + }}}, + pine => sub { my $fh = shift; my $f='\t[^\t]*'; + for (my $x = ''; defined($x); $x = $_) { + chomp $x; + $x .= $1 while(defined($_ = <$fh>) && /^ +(.*)$/); + $x =~ /^(\S+)$f\t\(?([^\t]+?)\)?(:?$f){0,2}$/ or next; + $aliases{$1} = [ split_addrs($2) ]; + }}, + elm => sub { my $fh = shift; + while (<$fh>) { + if (/^(\S+)\s+=\s+[^=]+=\s(\S+)/) { + my ($alias, $addr) = ($1, $2); + $aliases{$alias} = [ split_addrs($addr) ]; + } + } }, + + gnus => sub { my $fh = shift; while (<$fh>) { + if (/\(define-mail-alias\s+"(\S+?)"\s+"(\S+?)"\)/) { + $aliases{$1} = [ $2 ]; + }}} +); + +if (@alias_files and $aliasfiletype and defined $parse_alias{$aliasfiletype}) { + foreach my $file (@alias_files) { + open my $fh, '<', $file or die "opening $file: $!\n"; + $parse_alias{$aliasfiletype}->($fh); + close $fh; + } +} + +($sender) = expand_aliases($sender) if defined $sender; + +# returns 1 if the conflict must be solved using it as a format-patch argument +sub check_file_rev_conflict($) { + return unless $repo; + my $f = shift; + try { + $repo->command('rev-parse', '--verify', '--quiet', $f); + if (defined($format_patch)) { + return $format_patch; + } + die(<command('format-patch', '-o', tempdir(CLEANUP => 1), @rev_list_opts); +} + +if ($validate) { + foreach my $f (@files) { + unless (-p $f) { + my $error = validate_patch($f); + $error and die "fatal: $f: $error\nwarning: no patches were sent\n"; + } + } +} + +if (@files) { + unless ($quiet) { + print $_,"\n" for (@files); + } +} else { + print STDERR "\nNo patch files specified!\n\n"; + usage(); +} + +sub get_patch_subject { + my $fn = shift; + open (my $fh, '<', $fn); + while (my $line = <$fh>) { + next unless ($line =~ /^Subject: (.*)$/); + close $fh; + return "GIT: $1\n"; + } + close $fh; + die "No subject line in $fn ?"; +} + +if ($compose) { + # Note that this does not need to be secure, but we will make a small + # effort to have it be unique + $compose_filename = ($repo ? 
+ tempfile(".gitsendemail.msg.XXXXXX", DIR => $repo->repo_path()) : + tempfile(".gitsendemail.msg.XXXXXX", DIR => "."))[1]; + open my $c, ">", $compose_filename + or die "Failed to open for writing $compose_filename: $!"; + + + my $tpl_sender = $sender || $repoauthor || $repocommitter || ''; + my $tpl_subject = $initial_subject || ''; + my $tpl_reply_to = $initial_reply_to || ''; + + print $c <", $compose_filename . ".final" + or die "Failed to open $compose_filename.final : " . $!; + + open $c, "<", $compose_filename + or die "Failed to open $compose_filename : " . $!; + + my $need_8bit_cte = file_has_nonascii($compose_filename); + my $in_body = 0; + my $summary_empty = 1; + while(<$c>) { + next if m/^GIT:/; + if ($in_body) { + $summary_empty = 0 unless (/^\n$/); + } elsif (/^\n$/) { + $in_body = 1; + if ($need_8bit_cte) { + print $c2 "MIME-Version: 1.0\n", + "Content-Type: text/plain; ", + "charset=UTF-8\n", + "Content-Transfer-Encoding: 8bit\n"; + } + } elsif (/^MIME-Version:/i) { + $need_8bit_cte = 0; + } elsif (/^Subject:\s*(.+)\s*$/i) { + $initial_subject = $1; + my $subject = $initial_subject; + $_ = "Subject: " . + ($subject =~ /[^[:ascii:]]/ ? + quote_rfc2047($subject) : + $subject) . + "\n"; + } elsif (/^In-Reply-To:\s*(.+)\s*$/i) { + $initial_reply_to = $1; + next; + } elsif (/^From:\s*(.+)\s*$/i) { + $sender = $1; + next; + } elsif (/^(?:To|Cc|Bcc):/i) { + print "To/Cc/Bcc fields are not interpreted yet, they have been ignored\n"; + next; + } + print $c2 $_; + } + close $c; + close $c2; + + if ($summary_empty) { + print "Summary email is empty, skipping it\n"; + $compose = -1; + } +} elsif ($annotate) { + do_edit(@files); +} + +sub ask { + my ($prompt, %arg) = @_; + my $valid_re = $arg{valid_re}; + my $default = $arg{default}; + my $resp; + my $i = 0; + return defined $default ? $default : undef + unless defined $term->IN and defined fileno($term->IN) and + defined $term->OUT and defined fileno($term->OUT); + while ($i++ < 10) { + $resp = $term->readline($prompt); + if (!defined $resp) { # EOF + print "\n"; + return defined $default ? $default : undef; + } + if ($resp eq '' and defined $default) { + return $default; + } + if (!defined $valid_re or $resp =~ /$valid_re/) { + return $resp; + } + } + return undef; +} + +my %broken_encoding; + +sub file_declares_8bit_cte { + my $fn = shift; + open (my $fh, '<', $fn); + while (my $line = <$fh>) { + last if ($line =~ /^$/); + return 1 if ($line =~ /^Content-Transfer-Encoding: .*8bit.*$/); + } + close $fh; + return 0; +} + +foreach my $f (@files) { + next unless (body_or_subject_has_nonascii($f) + && !file_declares_8bit_cte($f)); + $broken_encoding{$f} = 1; +} + +if (!defined $auto_8bit_encoding && scalar %broken_encoding) { + print "The following files are 8bit, but do not declare " . + "a Content-Transfer-Encoding.\n"; + foreach my $f (sort keys %broken_encoding) { + print " $f\n"; + } + $auto_8bit_encoding = ask("Which 8bit encoding should I declare [UTF-8]? ", + default => "UTF-8"); +} + +if (!$force) { + for my $f (@files) { + if (get_patch_subject($f) =~ /\Q*** SUBJECT HERE ***\E/) { + die "Refusing to send because the patch\n\t$f\n" + . "has the template subject '*** SUBJECT HERE ***'. " + . "Pass --force if you really want to send.\n"; + } + } +} + +my $prompting = 0; +if (!defined $sender) { + $sender = $repoauthor || $repocommitter || ''; + $sender = ask("Who should the emails appear to be from? 
[$sender] ", + default => $sender); + print "Emails will be sent from: ", $sender, "\n"; + $prompting++; +} + +if (!@initial_to && !defined $to_cmd) { + my $to = ask("Who should the emails be sent to? "); + push @initial_to, parse_address_line($to) if defined $to; # sanitized/validated later + $prompting++; +} + +sub expand_aliases { + return map { expand_one_alias($_) } @_; +} + +my %EXPANDED_ALIASES; +sub expand_one_alias { + my $alias = shift; + if ($EXPANDED_ALIASES{$alias}) { + die "fatal: alias '$alias' expands to itself\n"; + } + local $EXPANDED_ALIASES{$alias} = 1; + return $aliases{$alias} ? expand_aliases(@{$aliases{$alias}}) : $alias; +} + +@initial_to = expand_aliases(@initial_to); +@initial_to = (map { sanitize_address($_) } @initial_to); +@initial_cc = expand_aliases(@initial_cc); +@bcclist = expand_aliases(@bcclist); + +if ($thread && !defined $initial_reply_to && $prompting) { + $initial_reply_to = ask( + "Message-ID to be used as In-Reply-To for the first email? "); +} +if (defined $initial_reply_to) { + $initial_reply_to =~ s/^\s*?\s*$//; + $initial_reply_to = "<$initial_reply_to>" if $initial_reply_to ne ''; +} + +if (!defined $smtp_server) { + foreach (qw( /usr/sbin/sendmail /usr/lib/sendmail )) { + if (-x $_) { + $smtp_server = $_; + last; + } + } + $smtp_server ||= 'localhost'; # could be 127.0.0.1, too... *shrug* +} + +if ($compose && $compose > 0) { + @files = ($compose_filename . ".final", @files); +} + +# Variables we set as part of the loop over files +our ($message_id, %mail, $subject, $reply_to, $references, $message, + $needs_confirm, $message_num, $ask_default); + +sub extract_valid_address { + my $address = shift; + my $local_part_regexp = qr/[^<>"\s@]+/; + my $domain_regexp = qr/[^.<>"\s@]+(?:\.[^.<>"\s@]+)+/; + + # check for a local address: + return $address if ($address =~ /^($local_part_regexp)$/); + + $address =~ s/^\s*<(.*)>\s*$/$1/; + if ($have_email_valid) { + return scalar Email::Valid->address($address); + } else { + # less robust/correct than the monster regexp in Email::Valid, + # but still does a 99% job, and one less dependency + $address =~ /($local_part_regexp\@$domain_regexp)/; + return $1; + } +} + +# Usually don't need to change anything below here. + +# we make a "fake" message id by taking the current number +# of seconds since the beginning of Unix time and tacking on +# a random number to the end, in case we are called quicker than +# 1 second since the last time we were called. + +# We'll setup a template for the message id, using the "from" address: + +my ($message_id_stamp, $message_id_serial); +sub make_message_id { + my $uniq; + if (!defined $message_id_stamp) { + $message_id_stamp = sprintf("%s-%s", time, $$); + $message_id_serial = 0; + } + $message_id_serial++; + $uniq = "$message_id_stamp-$message_id_serial"; + + my $du_part; + for ($sender, $repocommitter, $repoauthor) { + $du_part = extract_valid_address(sanitize_address($_)); + last if (defined $du_part and $du_part ne ''); + } + if (not defined $du_part or $du_part eq '') { + require Sys::Hostname; + $du_part = 'user@' . Sys::Hostname::hostname(); + } + my $message_id_template = "<%s-git-send-email-%s>"; + $message_id = sprintf($message_id_template, $uniq, $du_part); + #print "new message id = $message_id\n"; # Was useful for debugging +} + + + +$time = time - scalar $#files; + +sub unquote_rfc2047 { + local ($_) = @_; + my $encoding; + if (s/=\?([^?]+)\?q\?(.*)\?=/$2/g) { + $encoding = $1; + s/_/ /g; + s/=([0-9A-F]{2})/chr(hex($1))/eg; + } + return wantarray ? 
($_, $encoding) : $_; +} + +sub quote_rfc2047 { + local $_ = shift; + my $encoding = shift || 'UTF-8'; + s/([^-a-zA-Z0-9!*+\/])/sprintf("=%02X", ord($1))/eg; + s/(.*)/=\?$encoding\?q\?$1\?=/; + return $_; +} + +sub is_rfc2047_quoted { + my $s = shift; + my $token = qr/[^][()<>@,;:"\/?.= \000-\037\177-\377]+/; + my $encoded_text = qr/[!->@-~]+/; + length($s) <= 75 && + $s =~ m/^(?:"[[:ascii:]]*"|=\?$token\?$token\?$encoded_text\?=)$/o; +} + +# use the simplest quoting being able to handle the recipient +sub sanitize_address { + my ($recipient) = @_; + my ($recipient_name, $recipient_addr) = ($recipient =~ /^(.*?)\s*(<.*)/); + + if (not $recipient_name) { + return $recipient; + } + + # if recipient_name is already quoted, do nothing + if (is_rfc2047_quoted($recipient_name)) { + return $recipient; + } + + # rfc2047 is needed if a non-ascii char is included + if ($recipient_name =~ /[^[:ascii:]]/) { + $recipient_name =~ s/^"(.*)"$/$1/; + $recipient_name = quote_rfc2047($recipient_name); + } + + # double quotes are needed if specials or CTLs are included + elsif ($recipient_name =~ /[][()<>@,;:\\".\000-\037\177]/) { + $recipient_name =~ s/(["\\\r])/\\$1/g; + $recipient_name = qq["$recipient_name"]; + } + + return "$recipient_name $recipient_addr"; + +} + +# Returns the local Fully Qualified Domain Name (FQDN) if available. +# +# Tightly configured MTAa require that a caller sends a real DNS +# domain name that corresponds the IP address in the HELO/EHLO +# handshake. This is used to verify the connection and prevent +# spammers from trying to hide their identity. If the DNS and IP don't +# match, the receiveing MTA may deny the connection. +# +# Here is a deny example of Net::SMTP with the default "localhost.localdomain" +# +# Net::SMTP=GLOB(0x267ec28)>>> EHLO localhost.localdomain +# Net::SMTP=GLOB(0x267ec28)<<< 550 EHLO argument does not match calling host +# +# This maildomain*() code is based on ideas in Perl library Test::Reporter +# /usr/share/perl5/Test/Reporter/Mail/Util.pm ==> sub _maildomain () + +sub valid_fqdn { + my $domain = shift; + return defined $domain && !($^O eq 'darwin' && $domain =~ /\.local$/) && $domain =~ /\./; +} + +sub maildomain_net { + my $maildomain; + + if (eval { require Net::Domain; 1 }) { + my $domain = Net::Domain::domainname(); + $maildomain = $domain if valid_fqdn($domain); + } + + return $maildomain; +} + +sub maildomain_mta { + my $maildomain; + + if (eval { require Net::SMTP; 1 }) { + for my $host (qw(mailhost localhost)) { + my $smtp = Net::SMTP->new($host); + if (defined $smtp) { + my $domain = $smtp->domain; + $smtp->quit; + + $maildomain = $domain if valid_fqdn($domain); + + last if $maildomain; + } + } + } + + return $maildomain; +} + +sub maildomain { + return maildomain_net() || maildomain_mta() || 'localhost.localdomain'; +} + +# Returns 1 if the message was sent, and 0 otherwise. +# In actuality, the whole program dies when there +# is an error sending a message. 
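+# send_message() works on the per-patch globals set up in the loop further
+# below (@to, @cc, $subject, $message, $needs_confirm) rather than on arguments.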
+ +sub send_message { + my @recipients = unique_email_list(@to); + @cc = (grep { my $cc = extract_valid_address($_); + not grep { $cc eq $_ || $_ =~ /<\Q${cc}\E>$/ } @recipients + } + map { sanitize_address($_) } + @cc); + my $to = join (",\n\t", @recipients); + @recipients = unique_email_list(@recipients,@cc,@bcclist); + @recipients = (map { extract_valid_address($_) } @recipients); + my $date = format_2822_time($time++); + my $gitversion = '1.7.4'; + if ($gitversion =~ m/..GIT_VERSION../) { + $gitversion = Git::version(); + } + + my $cc = join(",\n\t", unique_email_list(@cc)); + my $ccline = ""; + if ($cc ne '') { + $ccline = "\nCc: $cc"; + } + my $sanitized_sender = sanitize_address($sender); + make_message_id() unless defined($message_id); + + my $header = "From: $sanitized_sender +To: $to${ccline} +Subject: $subject +Date: $date +Message-Id: $message_id +X-Mailer: git-send-email $gitversion +"; + if ($reply_to) { + + $header .= "In-Reply-To: $reply_to\n"; + $header .= "References: $references\n"; + } + if (@xh) { + $header .= join("\n", @xh) . "\n"; + } + + my @sendmail_parameters = ('-i', @recipients); + my $raw_from = $sanitized_sender; + if (defined $envelope_sender && $envelope_sender ne "auto") { + $raw_from = $envelope_sender; + } + $raw_from = extract_valid_address($raw_from); + unshift (@sendmail_parameters, + '-f', $raw_from) if(defined $envelope_sender); + + if ($needs_confirm && !$dry_run) { + print "\n$header\n"; + if ($needs_confirm eq "inform") { + $confirm_unconfigured = 0; # squelch this message for the rest of this run + $ask_default = "y"; # assume yes on EOF since user hasn't explicitly asked for confirmation + print " The Cc list above has been expanded by additional\n"; + print " addresses found in the patch commit message. By default\n"; + print " send-email prompts before sending whenever this occurs.\n"; + print " This behavior is controlled by the sendemail.confirm\n"; + print " configuration setting.\n"; + print "\n"; + print " For additional information, run 'git send-email --help'.\n"; + print " To retain the current behavior, but squelch this message,\n"; + print " run 'git config --global sendemail.confirm auto'.\n\n"; + } + $_ = ask("Send this email? ([y]es|[n]o|[q]uit|[a]ll): ", + valid_re => qr/^(?:yes|y|no|n|quit|q|all|a)/i, + default => $ask_default); + die "Send this email reply required" unless defined $_; + if (/^n/i) { + return 0; + } elsif (/^q/i) { + cleanup_compose_files(); + exit(0); + } elsif (/^a/i) { + $confirm = 'never'; + } + } + + unshift (@sendmail_parameters, @smtp_server_options); + + if ($dry_run) { + # We don't want to send the email. + } elsif ($smtp_server =~ m#^/#) { + my $pid = open my $sm, '|-'; + defined $pid or die $!; + if (!$pid) { + exec($smtp_server, @sendmail_parameters) or die $!; + } + print $sm "$header\n$message"; + close $sm or die $!; + } else { + + if (!defined $smtp_server) { + die "The required SMTP server is not properly defined." + } + + if ($smtp_encryption eq 'ssl') { + $smtp_server_port ||= 465; # ssmtp + require Net::SMTP::SSL; + $smtp_domain ||= maildomain(); + $smtp ||= Net::SMTP::SSL->new($smtp_server, + Hello => $smtp_domain, + Port => $smtp_server_port); + } + else { + require Net::SMTP; + $smtp_domain ||= maildomain(); + $smtp ||= Net::SMTP->new((defined $smtp_server_port) + ? 
"$smtp_server:$smtp_server_port" + : $smtp_server, + Hello => $smtp_domain, + Debug => $debug_net_smtp); + if ($smtp_encryption eq 'tls' && $smtp) { + require Net::SMTP::SSL; + $smtp->command('STARTTLS'); + $smtp->response(); + if ($smtp->code == 220) { + $smtp = Net::SMTP::SSL->start_SSL($smtp) + or die "STARTTLS failed! ".$smtp->message; + $smtp_encryption = ''; + # Send EHLO again to receive fresh + # supported commands + $smtp->hello(); + } else { + die "Server does not support STARTTLS! ".$smtp->message; + } + } + } + + if (!$smtp) { + die "Unable to initialize SMTP properly. Check config and use --smtp-debug. ", + "VALUES: server=$smtp_server ", + "encryption=$smtp_encryption ", + "hello=$smtp_domain", + defined $smtp_server_port ? "port=$smtp_server_port" : ""; + } + + if (defined $smtp_authuser) { + + if (!defined $smtp_authpass) { + + system "stty -echo"; + + do { + print "Password: "; + $_ = ; + print "\n"; + } while (!defined $_); + + chomp($smtp_authpass = $_); + + system "stty echo"; + } + + $auth ||= $smtp->auth( $smtp_authuser, $smtp_authpass ) or die $smtp->message; + } + + $smtp->mail( $raw_from ) or die $smtp->message; + $smtp->to( @recipients ) or die $smtp->message; + $smtp->data or die $smtp->message; + $smtp->datasend("$header\n$message") or die $smtp->message; + $smtp->dataend() or die $smtp->message; + $smtp->code =~ /250|200/ or die "Failed to send $subject\n".$smtp->message; + } + if ($quiet) { + printf (($dry_run ? "Dry-" : "")."Sent %s\n", $subject); + } else { + print (($dry_run ? "Dry-" : "")."OK. Log says:\n"); + if ($smtp_server !~ m#^/#) { + print "Server: $smtp_server\n"; + print "MAIL FROM:<$raw_from>\n"; + foreach my $entry (@recipients) { + print "RCPT TO:<$entry>\n"; + } + } else { + print "Sendmail: $smtp_server ".join(' ',@sendmail_parameters)."\n"; + } + print $header, "\n"; + if ($smtp) { + print "Result: ", $smtp->code, ' ', + ($smtp->message =~ /\n([^\n]+\n)$/s), "\n"; + } else { + print "Result: OK\n"; + } + } + + return 1; +} + +$reply_to = $initial_reply_to; +$references = $initial_reply_to || ''; +$subject = $initial_subject; +$message_num = 0; + +foreach my $t (@files) { + open my $fh, "<", $t or die "can't open file $t"; + + my $author = undef; + my $author_encoding; + my $has_content_type; + my $body_encoding; + @to = (); + @cc = (); + @xh = (); + my $input_format = undef; + my @header = (); + $message = ""; + $message_num++; + # First unfold multiline header fields + while(<$fh>) { + last if /^\s*$/; + if (/^\s+\S/ and @header) { + chomp($header[$#header]); + s/^\s+/ /; + $header[$#header] .= $_; + } else { + push(@header, $_); + } + } + # Now parse the header + foreach(@header) { + if (/^From /) { + $input_format = 'mbox'; + next; + } + chomp; + if (!defined $input_format && /^[-A-Za-z]+:\s/) { + $input_format = 'mbox'; + } + + if (defined $input_format && $input_format eq 'mbox') { + if (/^Subject:\s+(.*)$/) { + $subject = $1; + } + elsif (/^From:\s+(.*)$/) { + ($author, $author_encoding) = unquote_rfc2047($1); + next if $suppress_cc{'author'}; + next if $suppress_cc{'self'} and $author eq $sender; + printf("(mbox) Adding cc: %s from line '%s'\n", + $1, $_) unless $quiet; + push @cc, $1; + } + elsif (/^To:\s+(.*)$/) { + foreach my $addr (parse_address_line($1)) { + printf("(mbox) Adding to: %s from line '%s'\n", + $addr, $_) unless $quiet; + push @to, sanitize_address($addr); + } + } + elsif (/^Cc:\s+(.*)$/) { + foreach my $addr (parse_address_line($1)) { + if (unquote_rfc2047($addr) eq $sender) { + next if ($suppress_cc{'self'}); + } 
else { + next if ($suppress_cc{'cc'}); + } + printf("(mbox) Adding cc: %s from line '%s'\n", + $addr, $_) unless $quiet; + push @cc, $addr; + } + } + elsif (/^Content-type:/i) { + $has_content_type = 1; + if (/charset="?([^ "]+)/) { + $body_encoding = $1; + } + push @xh, $_; + } + elsif (/^Message-Id: (.*)/i) { + $message_id = $1; + } + elsif (!/^Date:\s/ && /^[-A-Za-z]+:\s+\S/) { + push @xh, $_; + } + + } else { + # In the traditional + # "send lots of email" format, + # line 1 = cc + # line 2 = subject + # So let's support that, too. + $input_format = 'lots'; + if (@cc == 0 && !$suppress_cc{'cc'}) { + printf("(non-mbox) Adding cc: %s from line '%s'\n", + $_, $_) unless $quiet; + push @cc, $_; + } elsif (!defined $subject) { + $subject = $_; + } + } + } + # Now parse the message body + while(<$fh>) { + $message .= $_; + if (/^(Signed-off-by|Cc): (.*)$/i) { + chomp; + my ($what, $c) = ($1, $2); + chomp $c; + if ($c eq $sender) { + next if ($suppress_cc{'self'}); + } else { + next if $suppress_cc{'sob'} and $what =~ /Signed-off-by/i; + next if $suppress_cc{'bodycc'} and $what =~ /Cc/i; + } + push @cc, $c; + printf("(body) Adding cc: %s from line '%s'\n", + $c, $_) unless $quiet; + } + } + close $fh; + + push @to, recipients_cmd("to-cmd", "to", $to_cmd, $t) + if defined $to_cmd; + push @cc, recipients_cmd("cc-cmd", "cc", $cc_cmd, $t) + if defined $cc_cmd && !$suppress_cc{'cccmd'}; + + if ($broken_encoding{$t} && !$has_content_type) { + $has_content_type = 1; + push @xh, "MIME-Version: 1.0", + "Content-Type: text/plain; charset=$auto_8bit_encoding", + "Content-Transfer-Encoding: 8bit"; + $body_encoding = $auto_8bit_encoding; + } + + if ($broken_encoding{$t} && !is_rfc2047_quoted($subject)) { + $subject = quote_rfc2047($subject, $auto_8bit_encoding); + } + + if (defined $author and $author ne $sender) { + $message = "From: $author\n\n$message"; + if (defined $author_encoding) { + if ($has_content_type) { + if ($body_encoding eq $author_encoding) { + # ok, we already have the right encoding + } + else { + # uh oh, we should re-encode + } + } + else { + $has_content_type = 1; + push @xh, + 'MIME-Version: 1.0', + "Content-Type: text/plain; charset=$author_encoding", + 'Content-Transfer-Encoding: 8bit'; + } + } + } + + $needs_confirm = ( + $confirm eq "always" or + ($confirm =~ /^(?:auto|cc)$/ && @cc) or + ($confirm =~ /^(?:auto|compose)$/ && $compose && $message_num == 1)); + $needs_confirm = "inform" if ($needs_confirm && $confirm_unconfigured && @cc); + + @to = (@initial_to, @to); + @cc = (@initial_cc, @cc); + + my $message_was_sent = send_message(); + + # set up for the next message + if ($thread && $message_was_sent && + (chain_reply_to() || !defined $reply_to || length($reply_to) == 0 || + $message_num == 1)) { + $reply_to = $message_id; + if (length $references > 0) { + $references .= "\n $message_id"; + } else { + $references = "$message_id"; + } + } + $message_id = undef; +} + +# Execute a command (e.g. 
$to_cmd) to get a list of email addresses +# and return a results array +sub recipients_cmd { + my ($prefix, $what, $cmd, $file) = @_; + + my $sanitized_sender = sanitize_address($sender); + my @addresses = (); + open my $fh, "$cmd \Q$file\E |" + or die "($prefix) Could not execute '$cmd'"; + while (my $address = <$fh>) { + $address =~ s/^\s*//g; + $address =~ s/\s*$//g; + $address = sanitize_address($address); + next if ($address eq $sanitized_sender and $suppress_from); + push @addresses, $address; + printf("($prefix) Adding %s: %s from: '%s'\n", + $what, $address, $cmd) unless $quiet; + } + close $fh + or die "($prefix) failed to close pipe to '$cmd'"; + return @addresses; +} + +cleanup_compose_files(); + +sub cleanup_compose_files { + unlink($compose_filename, $compose_filename . ".final") if $compose; +} + +$smtp->quit if $smtp; + +sub unique_email_list { + my %seen; + my @emails; + + foreach my $entry (@_) { + if (my $clean = extract_valid_address($entry)) { + $seen{$clean} ||= 0; + next if $seen{$clean}++; + push @emails, $entry; + } else { + print STDERR "W: unable to extract a valid address", + " from: $entry\n"; + } + } + return @emails; +} + +sub validate_patch { + my $fn = shift; + open(my $fh, '<', $fn) + or die "unable to open $fn: $!\n"; + while (my $line = <$fh>) { + if (length($line) > 998) { + return "$.: patch contains a line longer than 998 characters"; + } + } + return undef; +} + +sub file_has_nonascii { + my $fn = shift; + open(my $fh, '<', $fn) + or die "unable to open $fn: $!\n"; + while (my $line = <$fh>) { + return 1 if $line =~ /[^[:ascii:]]/; + } + return 0; +} + +sub body_or_subject_has_nonascii { + my $fn = shift; + open(my $fh, '<', $fn) + or die "unable to open $fn: $!\n"; + while (my $line = <$fh>) { + last if $line =~ /^$/; + return 1 if $line =~ /^Subject.*[^[:ascii:]]/; + } + while (my $line = <$fh>) { + return 1 if $line =~ /[^[:ascii:]]/; + } + return 0; +} diff --git a/SparkleShare/Mac/git/libexec/git-core/git-send-pack b/SparkleShare/Mac/git/libexec/git-core/git-send-pack new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-send-pack @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-sh-setup b/SparkleShare/Mac/git/libexec/git-core/git-sh-setup new file mode 100644 index 00000000..b5ede679 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-sh-setup @@ -0,0 +1,253 @@ +#!/bin/sh +# +# This is included in commands that either have to be run from the toplevel +# of the repository, or with GIT_DIR environment variable properly. +# If the GIT_DIR does not look like the right correct git-repository, +# it dies. + +# Having this variable in your environment would break scripts because +# you would cause "cd" to be taken to unexpected places. If you +# like CDPATH, define it for your interactive shell sessions without +# exporting it. 
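+# (For example, with CDPATH=..:~ a plain "cd subdir" could end up in ~/subdir
+# instead of ./subdir, and would also print the directory it picked.)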
+unset CDPATH + +git_broken_path_fix () { + case ":$PATH:" in + *:$1:*) : ok ;; + *) + PATH=$( + SANE_TOOL_PATH="$1" + IFS=: path= sep= + set x $PATH + shift + for elem + do + case "$SANE_TOOL_PATH:$elem" in + (?*:/bin | ?*:/usr/bin) + path="$path$sep$SANE_TOOL_PATH" + sep=: + SANE_TOOL_PATH= + esac + path="$path$sep$elem" + sep=: + done + echo "$path" + ) + ;; + esac +} + + +die() { + echo >&2 "$@" + exit 1 +} + +GIT_QUIET= + +say () { + if test -z "$GIT_QUIET" + then + printf '%s\n' "$*" + fi +} + +if test -n "$OPTIONS_SPEC"; then + usage() { + "$0" -h + exit 1 + } + + parseopt_extra= + [ -n "$OPTIONS_KEEPDASHDASH" ] && + parseopt_extra="--keep-dashdash" + + eval "$( + echo "$OPTIONS_SPEC" | + git rev-parse --parseopt $parseopt_extra -- "$@" || + echo exit $? + )" +else + dashless=$(basename "$0" | sed -e 's/-/ /') + usage() { + die "Usage: $dashless $USAGE" + } + + if [ -z "$LONG_USAGE" ] + then + LONG_USAGE="Usage: $dashless $USAGE" + else + LONG_USAGE="Usage: $dashless $USAGE + +$LONG_USAGE" + fi + + case "$1" in + -h|--h|--he|--hel|--help) + echo "$LONG_USAGE" + exit + esac +fi + +set_reflog_action() { + if [ -z "${GIT_REFLOG_ACTION:+set}" ] + then + GIT_REFLOG_ACTION="$*" + export GIT_REFLOG_ACTION + fi +} + +git_editor() { + if test -z "${GIT_EDITOR:+set}" + then + GIT_EDITOR="$(git var GIT_EDITOR)" || return $? + fi + + eval "$GIT_EDITOR" '"$@"' +} + +git_pager() { + if test -t 1 + then + GIT_PAGER=$(git var GIT_PAGER) + else + GIT_PAGER=cat + fi + : ${LESS=-FRSX} + export LESS + + eval "$GIT_PAGER" '"$@"' +} + +sane_grep () { + GREP_OPTIONS= LC_ALL=C grep "$@" +} + +sane_egrep () { + GREP_OPTIONS= LC_ALL=C egrep "$@" +} + +is_bare_repository () { + git rev-parse --is-bare-repository +} + +cd_to_toplevel () { + cdup=$(git rev-parse --show-toplevel) && + cd "$cdup" || { + echo >&2 "Cannot chdir to $cdup, the toplevel of the working tree" + exit 1 + } +} + +require_work_tree () { + test "$(git rev-parse --is-inside-work-tree 2>/dev/null)" = true || + die "fatal: $0 cannot be used without a working tree." +} + +require_clean_work_tree () { + git rev-parse --verify HEAD >/dev/null || exit 1 + git update-index -q --ignore-submodules --refresh + err=0 + + if ! git diff-files --quiet --ignore-submodules + then + echo >&2 "Cannot $1: You have unstaged changes." + err=1 + fi + + if ! git diff-index --cached --quiet --ignore-submodules HEAD -- + then + if [ $err = 0 ] + then + echo >&2 "Cannot $1: Your index contains uncommitted changes." + else + echo >&2 "Additionally, your index contains uncommitted changes." + fi + err=1 + fi + + if [ $err = 1 ] + then + test -n "$2" && echo >&2 "$2" + exit 1 + fi +} + +get_author_ident_from_commit () { + pick_author_script=' + /^author /{ + s/'\''/'\''\\'\'\''/g + h + s/^author \([^<]*\) <[^>]*> .*$/\1/ + s/.*/GIT_AUTHOR_NAME='\''&'\''/p + + g + s/^author [^<]* <\([^>]*\)> .*$/\1/ + s/.*/GIT_AUTHOR_EMAIL='\''&'\''/p + + g + s/^author [^<]* <[^>]*> \(.*\)$/\1/ + s/.*/GIT_AUTHOR_DATE='\''&'\''/p + + q + } + ' + encoding=$(git config i18n.commitencoding || echo UTF-8) + git show -s --pretty=raw --encoding="$encoding" "$1" -- | + LANG=C LC_ALL=C sed -ne "$pick_author_script" +} + +# Clear repo-local GIT_* environment variables. Useful when switching to +# another repository (e.g. when entering a submodule). See also the env +# list in git_connect() +clear_local_git_env() { + unset $(git rev-parse --local-env-vars) +} + +# Make sure we are in a valid repository of a vintage we understand, +# if we require to be in a git repository. 
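+# (Scripts that can run outside a repository set NONGIT_OK=Yes before sourcing
+# this file; SUBDIRECTORY_OK=Yes additionally allows running below the toplevel.)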
+if test -z "$NONGIT_OK"
+then
+	GIT_DIR=$(git rev-parse --git-dir) || exit
+	if [ -z "$SUBDIRECTORY_OK" ]
+	then
+		test -z "$(git rev-parse --show-cdup)" || {
+			exit=$?
+			echo >&2 "You need to run this command from the toplevel of the working tree."
+			exit $exit
+		}
+	fi
+	test -n "$GIT_DIR" && GIT_DIR=$(cd "$GIT_DIR" && pwd) || {
+		echo >&2 "Unable to determine absolute path of git directory"
+		exit 1
+	}
+	: ${GIT_OBJECT_DIRECTORY="$GIT_DIR/objects"}
+fi
+
+# Fix some commands on Windows
+case $(uname -s) in
+*MINGW*)
+	# Windows has its own (incompatible) sort and find
+	sort () {
+		/usr/bin/sort "$@"
+	}
+	find () {
+		/usr/bin/find "$@"
+	}
+	is_absolute_path () {
+		case "$1" in
+		[/\\]* | [A-Za-z]:*)
+			return 0 ;;
+		esac
+		return 1
+	}
+	;;
+*)
+	is_absolute_path () {
+		case "$1" in
+		/*)
+			return 0 ;;
+		esac
+		return 1
+	}
+esac
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-shell b/SparkleShare/Mac/git/libexec/git-core/git-shell
new file mode 100755
index 00000000..0e3b853d
Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-shell differ
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-shortlog b/SparkleShare/Mac/git/libexec/git-core/git-shortlog
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-shortlog
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-show b/SparkleShare/Mac/git/libexec/git-core/git-show
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-show
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-show-branch b/SparkleShare/Mac/git/libexec/git-core/git-show-branch
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-show-branch
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-show-index b/SparkleShare/Mac/git/libexec/git-core/git-show-index
new file mode 100755
index 00000000..54d8da40
Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-show-index differ
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-show-ref b/SparkleShare/Mac/git/libexec/git-core/git-show-ref
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-show-ref
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-stage b/SparkleShare/Mac/git/libexec/git-core/git-stage
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-stage
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-stash b/SparkleShare/Mac/git/libexec/git-core/git-stash
new file mode 100755
index 00000000..7561b374
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-stash
@@ -0,0 +1,504 @@
+#!/bin/sh
+# Copyright (c) 2007, Nanako Shiraishi
+
+dashless=$(basename "$0" | sed -e 's/-/ /')
+USAGE="list [<options>]
+   or: $dashless show [<stash>]
+   or: $dashless drop [-q|--quiet] [<stash>]
+   or: $dashless ( pop | apply ) [--index] [-q|--quiet] [<stash>]
+   or: $dashless branch <branchname> [<stash>]
+   or: $dashless [save [--patch] [-k|--[no-]keep-index] [-q|--quiet] [<message>]]
+   or: $dashless clear"
+
+SUBDIRECTORY_OK=Yes
+OPTIONS_SPEC=
+. 
git-sh-setup +require_work_tree +cd_to_toplevel + +TMP="$GIT_DIR/.git-stash.$$" +trap 'rm -f "$TMP-*"' 0 + +ref_stash=refs/stash + +if git config --get-colorbool color.interactive; then + help_color="$(git config --get-color color.interactive.help 'red bold')" + reset_color="$(git config --get-color '' reset)" +else + help_color= + reset_color= +fi + +no_changes () { + git diff-index --quiet --cached HEAD --ignore-submodules -- && + git diff-files --quiet --ignore-submodules +} + +clear_stash () { + if test $# != 0 + then + die "git stash clear with parameters is unimplemented" + fi + if current=$(git rev-parse --verify $ref_stash 2>/dev/null) + then + git update-ref -d $ref_stash $current + fi +} + +create_stash () { + stash_msg="$1" + + git update-index -q --refresh + if no_changes + then + exit 0 + fi + + # state of the base commit + if b_commit=$(git rev-parse --verify HEAD) + then + head=$(git rev-list --oneline -n 1 HEAD --) + else + die "You do not have the initial commit yet" + fi + + if branch=$(git symbolic-ref -q HEAD) + then + branch=${branch#refs/heads/} + else + branch='(no branch)' + fi + msg=$(printf '%s: %s' "$branch" "$head") + + # state of the index + i_tree=$(git write-tree) && + i_commit=$(printf 'index on %s\n' "$msg" | + git commit-tree $i_tree -p $b_commit) || + die "Cannot save the current index state" + + if test -z "$patch_mode" + then + + # state of the working tree + w_tree=$( ( + rm -f "$TMP-index" && + cp -p ${GIT_INDEX_FILE-"$GIT_DIR/index"} "$TMP-index" && + GIT_INDEX_FILE="$TMP-index" && + export GIT_INDEX_FILE && + git read-tree -m $i_tree && + git diff --name-only -z HEAD | git update-index -z --add --remove --stdin && + git write-tree && + rm -f "$TMP-index" + ) ) || + die "Cannot save the current worktree state" + + else + + rm -f "$TMP-index" && + GIT_INDEX_FILE="$TMP-index" git read-tree HEAD && + + # find out what the user wants + GIT_INDEX_FILE="$TMP-index" \ + git add--interactive --patch=stash -- && + + # state of the working tree + w_tree=$(GIT_INDEX_FILE="$TMP-index" git write-tree) || + die "Cannot save the current worktree state" + + git diff-tree -p HEAD $w_tree > "$TMP-patch" && + test -s "$TMP-patch" || + die "No changes selected" + + rm -f "$TMP-index" || + die "Cannot remove temporary index (can't happen)" + + fi + + # create the stash + if test -z "$stash_msg" + then + stash_msg=$(printf 'WIP on %s' "$msg") + else + stash_msg=$(printf 'On %s: %s' "$branch" "$stash_msg") + fi + w_commit=$(printf '%s\n' "$stash_msg" | + git commit-tree $w_tree -p $b_commit -p $i_commit) || + die "Cannot record working tree state" +} + +save_stash () { + keep_index= + patch_mode= + while test $# != 0 + do + case "$1" in + -k|--keep-index) + keep_index=t + ;; + --no-keep-index) + keep_index= + ;; + -p|--patch) + patch_mode=t + keep_index=t + ;; + -q|--quiet) + GIT_QUIET=t + ;; + --) + shift + break + ;; + -*) + echo "error: unknown option for 'stash save': $1" + echo " To provide a message, use git stash save -- '$1'" + usage + ;; + *) + break + ;; + esac + shift + done + + stash_msg="$*" + + git update-index -q --refresh + if no_changes + then + say 'No local changes to save' + exit 0 + fi + test -f "$GIT_DIR/logs/$ref_stash" || + clear_stash || die "Cannot initialize stash" + + create_stash "$stash_msg" + + # Make sure the reflog for stash is kept. 
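+	# (The no-op ':' with '>>' creates the reflog file when it is missing,
+	# so the 'git update-ref' below also records a reflog entry for the stash.)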
+ : >>"$GIT_DIR/logs/$ref_stash" + + git update-ref -m "$stash_msg" $ref_stash $w_commit || + die "Cannot save the current status" + say Saved working directory and index state "$stash_msg" + + if test -z "$patch_mode" + then + git reset --hard ${GIT_QUIET:+-q} + + if test -n "$keep_index" && test -n $i_tree + then + git read-tree --reset -u $i_tree + fi + else + git apply -R < "$TMP-patch" || + die "Cannot remove worktree changes" + + if test -z "$keep_index" + then + git reset + fi + fi +} + +have_stash () { + git rev-parse --verify $ref_stash >/dev/null 2>&1 +} + +list_stash () { + have_stash || return 0 + git log --format="%gd: %gs" -g "$@" $ref_stash -- +} + +show_stash () { + assert_stash_like "$@" + + git diff ${FLAGS:---stat} $b_commit $w_commit +} + +# +# Parses the remaining options looking for flags and +# at most one revision defaulting to ${ref_stash}@{0} +# if none found. +# +# Derives related tree and commit objects from the +# revision, if one is found. +# +# stash records the work tree, and is a merge between the +# base commit (first parent) and the index tree (second parent). +# +# REV is set to the symbolic version of the specified stash-like commit +# IS_STASH_LIKE is non-blank if ${REV} looks like a stash +# IS_STASH_REF is non-blank if the ${REV} looks like a stash ref +# s is set to the SHA1 of the stash commit +# w_commit is set to the commit containing the working tree +# b_commit is set to the base commit +# i_commit is set to the commit containing the index tree +# w_tree is set to the working tree +# b_tree is set to the base tree +# i_tree is set to the index tree +# +# GIT_QUIET is set to t if -q is specified +# INDEX_OPTION is set to --index if --index is specified. +# FLAGS is set to the remaining flags +# +# dies if: +# * too many revisions specified +# * no revision is specified and there is no stash stack +# * a revision is specified which cannot be resolve to a SHA1 +# * a non-existent stash reference is specified +# + +parse_flags_and_rev() +{ + test "$PARSE_CACHE" = "$*" && return 0 # optimisation + PARSE_CACHE="$*" + + IS_STASH_LIKE= + IS_STASH_REF= + INDEX_OPTION= + s= + w_commit= + b_commit= + i_commit= + w_tree= + b_tree= + i_tree= + + REV=$(git rev-parse --no-flags --symbolic "$@" 2>/dev/null) + + FLAGS= + for opt + do + case "$opt" in + -q|--quiet) + GIT_QUIET=-t + ;; + --index) + INDEX_OPTION=--index + ;; + -*) + FLAGS="${FLAGS}${FLAGS:+ }$opt" + ;; + esac + done + + set -- $REV + + case $# in + 0) + have_stash || die "No stash found." + set -- ${ref_stash}@{0} + ;; + 1) + : + ;; + *) + die "Too many revisions specified: $REV" + ;; + esac + + REV=$(git rev-parse --quiet --symbolic --verify $1 2>/dev/null) || die "$1 is not valid reference" + + i_commit=$(git rev-parse --quiet --verify $REV^2 2>/dev/null) && + set -- $(git rev-parse $REV $REV^1 $REV: $REV^1: $REV^2: 2>/dev/null) && + s=$1 && + w_commit=$1 && + b_commit=$2 && + w_tree=$3 && + b_tree=$4 && + i_tree=$5 && + IS_STASH_LIKE=t && + test "$ref_stash" = "$(git rev-parse --symbolic-full-name "${REV%@*}")" && + IS_STASH_REF=t + + if test "${REV}" != "${REV%{*\}}" + then + # maintainers: it would be better if git rev-parse indicated + # this condition with a non-zero status code but as of 1.7.2.1 it + # it did not. So, we use non-empty stderr output as a proxy for the + # condition of interest. 
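+		# (Note the redirection order: '2>&1 >/dev/null' keeps only stderr
+		# in the command substitution, while stdout is thrown away.)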
+ test -z "$(git rev-parse "$REV" 2>&1 >/dev/null)" || die "$REV does not exist in the stash log" + fi + +} + +is_stash_like() +{ + parse_flags_and_rev "$@" + test -n "$IS_STASH_LIKE" +} + +assert_stash_like() { + is_stash_like "$@" || die "'$*' is not a stash-like commit" +} + +is_stash_ref() { + is_stash_like "$@" && test -n "$IS_STASH_REF" +} + +assert_stash_ref() { + is_stash_ref "$@" || die "'$*' is not a stash reference" +} + +apply_stash () { + + assert_stash_like "$@" + + git update-index -q --refresh && + git diff-files --quiet --ignore-submodules || + die 'Cannot apply to a dirty working tree, please stage your changes' + + # current index state + c_tree=$(git write-tree) || + die 'Cannot apply a stash in the middle of a merge' + + unstashed_index_tree= + if test -n "$INDEX_OPTION" && test "$b_tree" != "$i_tree" && + test "$c_tree" != "$i_tree" + then + git diff-tree --binary $s^2^..$s^2 | git apply --cached + test $? -ne 0 && + die 'Conflicts in index. Try without --index.' + unstashed_index_tree=$(git write-tree) || + die 'Could not save index tree' + git reset + fi + + eval " + GITHEAD_$w_tree='Stashed changes' && + GITHEAD_$c_tree='Updated upstream' && + GITHEAD_$b_tree='Version stash was based on' && + export GITHEAD_$w_tree GITHEAD_$c_tree GITHEAD_$b_tree + " + + if test -n "$GIT_QUIET" + then + GIT_MERGE_VERBOSITY=0 && export GIT_MERGE_VERBOSITY + fi + if git merge-recursive $b_tree -- $c_tree $w_tree + then + # No conflict + if test -n "$unstashed_index_tree" + then + git read-tree "$unstashed_index_tree" + else + a="$TMP-added" && + git diff-index --cached --name-only --diff-filter=A $c_tree >"$a" && + git read-tree --reset $c_tree && + git update-index --add --stdin <"$a" || + die "Cannot unstage modified files" + rm -f "$a" + fi + squelch= + if test -n "$GIT_QUIET" + then + squelch='>/dev/null 2>&1' + fi + eval "git status $squelch" || : + else + # Merge conflict; keep the exit status from merge-recursive + status=$? + if test -n "$INDEX_OPTION" + then + echo >&2 'Index was not unstashed.' 
+		fi
+		exit $status
+	}
+}
+
+pop_stash() {
+	assert_stash_ref "$@"
+
+	apply_stash "$@" &&
+	drop_stash "$@"
+}
+
+drop_stash () {
+	assert_stash_ref "$@"
+
+	git reflog delete --updateref --rewrite "${REV}" &&
+	say "Dropped ${REV} ($s)" || die "${REV}: Could not drop stash entry"
+
+	# clear_stash if we just dropped the last stash entry
+	git rev-parse --verify "$ref_stash@{0}" > /dev/null 2>&1 || clear_stash
+}
+
+apply_to_branch () {
+	test -n "$1" || die 'No branch name specified'
+	branch=$1
+	shift 1
+
+	set -- --index "$@"
+	assert_stash_like "$@"
+
+	git checkout -b $branch $REV^ &&
+	apply_stash "$@" && {
+		test -z "$IS_STASH_REF" || drop_stash "$@"
+	}
+}
+
+PARSE_CACHE='--not-parsed'
+# The default command is "save" if nothing but options are given
+seen_non_option=
+for opt
+do
+	case "$opt" in
+	-*) ;;
+	*) seen_non_option=t; break ;;
+	esac
+done
+
+test -n "$seen_non_option" || set "save" "$@"
+
+# Main command set
+case "$1" in
+list)
+	shift
+	list_stash "$@"
+	;;
+show)
+	shift
+	show_stash "$@"
+	;;
+save)
+	shift
+	save_stash "$@"
+	;;
+apply)
+	shift
+	apply_stash "$@"
+	;;
+clear)
+	shift
+	clear_stash "$@"
+	;;
+create)
+	if test $# -gt 0 && test "$1" = create
+	then
+		shift
+	fi
+	create_stash "$*" && echo "$w_commit"
+	;;
+drop)
+	shift
+	drop_stash "$@"
+	;;
+pop)
+	shift
+	pop_stash "$@"
+	;;
+branch)
+	shift
+	apply_to_branch "$@"
+	;;
+*)
+	case $# in
+	0)
+		save_stash &&
+		say '(To restore them type "git stash apply")'
+		;;
+	*)
+		usage
+	esac
+	;;
+esac
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-status b/SparkleShare/Mac/git/libexec/git-core/git-status
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-status
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-stripspace b/SparkleShare/Mac/git/libexec/git-core/git-stripspace
new file mode 120000
index 00000000..f2cccfc2
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-stripspace
@@ -0,0 +1 @@
+../../bin/git
\ No newline at end of file
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-submodule b/SparkleShare/Mac/git/libexec/git-core/git-submodule
new file mode 100755
index 00000000..8b905897
--- /dev/null
+++ b/SparkleShare/Mac/git/libexec/git-core/git-submodule
@@ -0,0 +1,903 @@
+#!/bin/sh
+#
+# git-submodules.sh: add, init, update or list git submodules
+#
+# Copyright (c) 2007 Lars Hjemli
+
+dashless=$(basename "$0" | sed -e 's/-/ /')
+USAGE="[--quiet] add [-b branch] [-f|--force] [--reference <repository>] [--] <repository> [<path>]
+   or: $dashless [--quiet] status [--cached] [--recursive] [--] [<path>...]
+   or: $dashless [--quiet] init [--] [<path>...]
+   or: $dashless [--quiet] update [--init] [-N|--no-fetch] [--rebase] [--reference <repository>] [--merge] [--recursive] [--] [<path>...]
+   or: $dashless [--quiet] summary [--cached|--files] [--summary-limit <n>] [commit] [--] [<path>...]
+   or: $dashless [--quiet] foreach [--recursive] <command>
+   or: $dashless [--quiet] sync [--] [<path>...]"
+OPTIONS_SPEC=
+. 
git-parse-remote +require_work_tree + +command= +branch= +force= +reference= +cached= +recursive= +init= +files= +nofetch= +update= +prefix= + +# Resolve relative url by appending to parent's url +resolve_relative_url () +{ + remote=$(get_default_remote) + remoteurl=$(git config "remote.$remote.url") || + die "remote ($remote) does not have a url defined in .git/config" + url="$1" + remoteurl=${remoteurl%/} + sep=/ + while test -n "$url" + do + case "$url" in + ../*) + url="${url#../}" + case "$remoteurl" in + */*) + remoteurl="${remoteurl%/*}" + ;; + *:*) + remoteurl="${remoteurl%:*}" + sep=: + ;; + *) + die "cannot strip one component off url '$remoteurl'" + ;; + esac + ;; + ./*) + url="${url#./}" + ;; + *) + break;; + esac + done + echo "$remoteurl$sep${url%/}" +} + +# +# Get submodule info for registered submodules +# $@ = path to limit submodule list +# +module_list() +{ + git ls-files --error-unmatch --stage -- "$@" | sane_grep '^160000 ' +} + +# +# Map submodule path to submodule name +# +# $1 = path +# +module_name() +{ + # Do we have "submodule..path = $1" defined in .gitmodules file? + re=$(printf '%s\n' "$1" | sed -e 's/[].[^$\\*]/\\&/g') + name=$( git config -f .gitmodules --get-regexp '^submodule\..*\.path$' | + sed -n -e 's|^submodule\.\(.*\)\.path '"$re"'$|\1|p' ) + test -z "$name" && + die "No submodule mapping found in .gitmodules for path '$path'" + echo "$name" +} + +# +# Clone a submodule +# +# Prior to calling, cmd_update checks that a possibly existing +# path is not a git repository. +# Likewise, cmd_add checks that path does not exist at all, +# since it is the location of a new submodule. +# +module_clone() +{ + path=$1 + url=$2 + reference="$3" + + if test -n "$reference" + then + git-clone "$reference" -n "$url" "$path" + else + git-clone -n "$url" "$path" + fi || + die "Clone of '$url' into submodule path '$path' failed" +} + +# +# Add a new submodule to the working tree, .gitmodules and the index +# +# $@ = repo path +# +# optional branch is stored in global branch variable +# +cmd_add() +{ + # parse $args after "submodule ... add". + while test $# -ne 0 + do + case "$1" in + -b | --branch) + case "$2" in '') usage ;; esac + branch=$2 + shift + ;; + -f | --force) + force=$1 + ;; + -q|--quiet) + GIT_QUIET=1 + ;; + --reference) + case "$2" in '') usage ;; esac + reference="--reference=$2" + shift + ;; + --reference=*) + reference="$1" + shift + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift + done + + repo=$1 + path=$2 + + if test -z "$path"; then + path=$(echo "$repo" | + sed -e 's|/$||' -e 's|:*/*\.git$||' -e 's|.*[/:]||g') + fi + + if test -z "$repo" -o -z "$path"; then + usage + fi + + # assure repo is absolute or relative to parent + case "$repo" in + ./*|../*) + # dereference source url relative to parent's url + realrepo=$(resolve_relative_url "$repo") || exit + ;; + *:*|/*) + # absolute url + realrepo=$repo + ;; + *) + die "repo URL: '$repo' must be absolute or begin with ./|../" + ;; + esac + + # normalize path: + # multiple //; leading ./; /./; /../; trailing / + path=$(printf '%s/\n' "$path" | + sed -e ' + s|//*|/|g + s|^\(\./\)*|| + s|/\./|/|g + :start + s|\([^/]*\)/\.\./|| + tstart + s|/*$|| + ') + git ls-files --error-unmatch "$path" > /dev/null 2>&1 && + die "'$path' already exists in the index" + + if test -z "$force" && ! 
git add --dry-run --ignore-missing "$path" > /dev/null 2>&1 + then + echo >&2 "The following path is ignored by one of your .gitignore files:" && + echo >&2 $path && + echo >&2 "Use -f if you really want to add it." + exit 1 + fi + + # perhaps the path exists and is already a git repo, else clone it + if test -e "$path" + then + if test -d "$path"/.git -o -f "$path"/.git + then + echo "Adding existing repo at '$path' to the index" + else + die "'$path' already exists and is not a valid git repo" + fi + + case "$repo" in + ./*|../*) + url=$(resolve_relative_url "$repo") || exit + ;; + *) + url="$repo" + ;; + esac + git config submodule."$path".url "$url" + else + + module_clone "$path" "$realrepo" "$reference" || exit + ( + clear_local_git_env + cd "$path" && + # ash fails to wordsplit ${branch:+-b "$branch"...} + case "$branch" in + '') git checkout -f -q ;; + ?*) git checkout -f -q -B "$branch" "origin/$branch" ;; + esac + ) || die "Unable to checkout submodule '$path'" + fi + + git add $force "$path" || + die "Failed to add submodule '$path'" + + git config -f .gitmodules submodule."$path".path "$path" && + git config -f .gitmodules submodule."$path".url "$repo" && + git add --force .gitmodules || + die "Failed to register submodule '$path'" +} + +# +# Execute an arbitrary command sequence in each checked out +# submodule +# +# $@ = command to execute +# +cmd_foreach() +{ + # parse $args after "submodule ... foreach". + while test $# -ne 0 + do + case "$1" in + -q|--quiet) + GIT_QUIET=1 + ;; + --recursive) + recursive=1 + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift + done + + toplevel=$(pwd) + + module_list | + while read mode sha1 stage path + do + if test -e "$path"/.git + then + say "Entering '$prefix$path'" + name=$(module_name "$path") + ( + prefix="$prefix$path/" + clear_local_git_env + cd "$path" && + eval "$@" && + if test -n "$recursive" + then + cmd_foreach "--recursive" "$@" + fi + ) || + die "Stopping at '$path'; script returned non-zero status." + fi + done +} + +# +# Register submodules in .git/config +# +# $@ = requested paths (default to all) +# +cmd_init() +{ + # parse $args after "submodule ... init". + while test $# -ne 0 + do + case "$1" in + -q|--quiet) + GIT_QUIET=1 + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift + done + + module_list "$@" | + while read mode sha1 stage path + do + # Skip already registered paths + name=$(module_name "$path") || exit + url=$(git config submodule."$name".url) + test -z "$url" || continue + + url=$(git config -f .gitmodules submodule."$name".url) + test -z "$url" && + die "No url found for submodule path '$path' in .gitmodules" + + # Possibly a url relative to parent + case "$url" in + ./*|../*) + url=$(resolve_relative_url "$url") || exit + ;; + esac + + git config submodule."$name".url "$url" || + die "Failed to register url for submodule path '$path'" + + upd="$(git config -f .gitmodules submodule."$name".update)" + test -z "$upd" || + git config submodule."$name".update "$upd" || + die "Failed to register update mode for submodule path '$path'" + + say "Submodule '$name' ($url) registered for path '$path'" + done +} + +# +# Update each submodule path to correct revision, using clone and checkout as needed +# +# $@ = requested paths (default to all) +# +cmd_update() +{ + # parse $args after "submodule ... update". 
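+	# (Recognised options are re-quoted with 'git rev-parse --sq-quote' into
+	# orig_flags so they can be replayed verbatim when recursing with --recursive.)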
+ orig_flags= + while test $# -ne 0 + do + case "$1" in + -q|--quiet) + GIT_QUIET=1 + ;; + -i|--init) + init=1 + ;; + -N|--no-fetch) + nofetch=1 + ;; + -r|--rebase) + update="rebase" + ;; + --reference) + case "$2" in '') usage ;; esac + reference="--reference=$2" + orig_flags="$orig_flags $(git rev-parse --sq-quote "$1")" + shift + ;; + --reference=*) + reference="$1" + ;; + -m|--merge) + update="merge" + ;; + --recursive) + recursive=1 + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + orig_flags="$orig_flags $(git rev-parse --sq-quote "$1")" + shift + done + + if test -n "$init" + then + cmd_init "--" "$@" || return + fi + + module_list "$@" | + while read mode sha1 stage path + do + name=$(module_name "$path") || exit + url=$(git config submodule."$name".url) + update_module=$(git config submodule."$name".update) + if test -z "$url" + then + # Only mention uninitialized submodules when its + # path have been specified + test "$#" != "0" && + say "Submodule path '$path' not initialized" && + say "Maybe you want to use 'update --init'?" + continue + fi + + if ! test -d "$path"/.git -o -f "$path"/.git + then + module_clone "$path" "$url" "$reference"|| exit + subsha1= + else + subsha1=$(clear_local_git_env; cd "$path" && + git rev-parse --verify HEAD) || + die "Unable to find current revision in submodule path '$path'" + fi + + if ! test -z "$update" + then + update_module=$update + fi + + if test "$subsha1" != "$sha1" + then + force= + if test -z "$subsha1" + then + force="-f" + fi + + if test -z "$nofetch" + then + (clear_local_git_env; cd "$path" && + git-fetch) || + die "Unable to fetch in submodule path '$path'" + fi + + case "$update_module" in + rebase) + command="git rebase" + action="rebase" + msg="rebased onto" + ;; + merge) + command="git merge" + action="merge" + msg="merged in" + ;; + *) + command="git checkout $force -q" + action="checkout" + msg="checked out" + ;; + esac + + (clear_local_git_env; cd "$path" && $command "$sha1") || + die "Unable to $action '$sha1' in submodule path '$path'" + say "Submodule path '$path': $msg '$sha1'" + fi + + if test -n "$recursive" + then + (clear_local_git_env; cd "$path" && eval cmd_update "$orig_flags") || + die "Failed to recurse into submodule path '$path'" + fi + done +} + +set_name_rev () { + revname=$( ( + clear_local_git_env + cd "$1" && { + git describe "$2" 2>/dev/null || + git describe --tags "$2" 2>/dev/null || + git describe --contains "$2" 2>/dev/null || + git describe --all --always "$2" + } + ) ) + test -z "$revname" || revname=" ($revname)" +} +# +# Show commit summary for submodules in index or working tree +# +# If '--cached' is given, show summary between index and given commit, +# or between working tree and given commit +# +# $@ = [commit (default 'HEAD'),] requested paths (default all) +# +cmd_summary() { + summary_limit=-1 + for_status= + diff_cmd=diff-index + + # parse $args after "submodule ... summary". 
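+	# (-n/--summary-limit expects a number; the arithmetic expansion below
+	# re-checks that the argument really was numeric.)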
+ while test $# -ne 0 + do + case "$1" in + --cached) + cached="$1" + ;; + --files) + files="$1" + ;; + --for-status) + for_status="$1" + ;; + -n|--summary-limit) + if summary_limit=$(($2 + 0)) 2>/dev/null && test "$summary_limit" = "$2" + then + : + else + usage + fi + shift + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift + done + + test $summary_limit = 0 && return + + if rev=$(git rev-parse -q --verify --default HEAD ${1+"$1"}) + then + head=$rev + test $# = 0 || shift + elif test -z "$1" -o "$1" = "HEAD" + then + # before the first commit: compare with an empty tree + head=$(git hash-object -w -t tree --stdin </dev/null) + test -z "$1" || shift + else + head="HEAD" + fi + + if [ -n "$files" ] + then + test -n "$cached" && + die "--cached cannot be used with --files" + diff_cmd=diff-files + head= + fi + + cd_to_toplevel + # Get modified modules cared by user + modules=$(git $diff_cmd $cached --ignore-submodules=dirty --raw $head -- "$@" | + sane_egrep '^:([0-7]* )?160000' | + while read mod_src mod_dst sha1_src sha1_dst status name + do + # Always show modules deleted or type-changed (blob<->module) + test $status = D -o $status = T && echo "$name" && continue + # Also show added or modified modules which are checked out + GIT_DIR="$name/.git" git-rev-parse --git-dir >/dev/null 2>&1 && + echo "$name" + done + ) + + test -z "$modules" && return + + git $diff_cmd $cached --ignore-submodules=dirty --raw $head -- $modules | + sane_egrep '^:([0-7]* )?160000' | + cut -c2- | + while read mod_src mod_dst sha1_src sha1_dst status name + do + if test -z "$cached" && + test $sha1_dst = 0000000000000000000000000000000000000000 + then + case "$mod_dst" in + 160000) + sha1_dst=$(GIT_DIR="$name/.git" git rev-parse HEAD) + ;; + 100644 | 100755 | 120000) + sha1_dst=$(git hash-object $name) + ;; + 000000) + ;; # removed + *) + # unexpected type + echo >&2 "unexpected mode $mod_dst" + continue ;; + esac + fi + missing_src= + missing_dst= + + test $mod_src = 160000 && + ! GIT_DIR="$name/.git" git-rev-parse -q --verify $sha1_src^0 >/dev/null && + missing_src=t + + test $mod_dst = 160000 && + ! GIT_DIR="$name/.git" git-rev-parse -q --verify $sha1_dst^0 >/dev/null && + missing_dst=t + + total_commits= + case "$missing_src,$missing_dst" in + t,) + errmsg=" Warn: $name doesn't contain commit $sha1_src" + ;; + ,t) + errmsg=" Warn: $name doesn't contain commit $sha1_dst" + ;; + t,t) + errmsg=" Warn: $name doesn't contain commits $sha1_src and $sha1_dst" + ;; + *) + errmsg= + total_commits=$( + if test $mod_src = 160000 -a $mod_dst = 160000 + then + range="$sha1_src...$sha1_dst" + elif test $mod_src = 160000 + then + range=$sha1_src + else + range=$sha1_dst + fi + GIT_DIR="$name/.git" \ + git rev-list --first-parent $range -- | wc -l + ) + total_commits=" ($(($total_commits + 0)))" + ;; + esac + + sha1_abbr_src=$(echo $sha1_src | cut -c1-7) + sha1_abbr_dst=$(echo $sha1_dst | cut -c1-7) + if test $status = T + then + if test $mod_dst = 160000 + then + echo "* $name $sha1_abbr_src(blob)->$sha1_abbr_dst(submodule)$total_commits:" + else + echo "* $name $sha1_abbr_src(submodule)->$sha1_abbr_dst(blob)$total_commits:" + fi + else + echo "* $name $sha1_abbr_src...$sha1_abbr_dst$total_commits:" + fi + if test -n "$errmsg" + then + # Don't give error msg for modification whose dst is not submodule + # i.e.
deleted or changed to blob + test $mod_dst = 160000 && echo "$errmsg" + else + if test $mod_src = 160000 -a $mod_dst = 160000 + then + limit= + test $summary_limit -gt 0 && limit="-$summary_limit" + GIT_DIR="$name/.git" \ + git log $limit --pretty='format: %m %s' \ + --first-parent $sha1_src...$sha1_dst + elif test $mod_dst = 160000 + then + GIT_DIR="$name/.git" \ + git log --pretty='format: > %s' -1 $sha1_dst + else + GIT_DIR="$name/.git" \ + git log --pretty='format: < %s' -1 $sha1_src + fi + echo + fi + echo + done | + if test -n "$for_status"; then + if [ -n "$files" ]; then + echo "# Submodules changed but not updated:" + else + echo "# Submodule changes to be committed:" + fi + echo "#" + sed -e 's|^|# |' -e 's|^# $|#|' + else + cat + fi +} +# +# List all submodules, prefixed with: +# - submodule not initialized +# + different revision checked out +# +# If --cached was specified the revision in the index will be printed +# instead of the currently checked out revision. +# +# $@ = requested paths (default to all) +# +cmd_status() +{ + # parse $args after "submodule ... status". + orig_flags= + while test $# -ne 0 + do + case "$1" in + -q|--quiet) + GIT_QUIET=1 + ;; + --cached) + cached=1 + ;; + --recursive) + recursive=1 + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + orig_flags="$orig_flags $(git rev-parse --sq-quote "$1")" + shift + done + + module_list "$@" | + while read mode sha1 stage path + do + name=$(module_name "$path") || exit + url=$(git config submodule."$name".url) + displaypath="$prefix$path" + if test -z "$url" || ! test -d "$path"/.git -o -f "$path"/.git + then + say "-$sha1 $displaypath" + continue; + fi + set_name_rev "$path" "$sha1" + if git diff-files --ignore-submodules=dirty --quiet -- "$path" + then + say " $sha1 $displaypath$revname" + else + if test -z "$cached" + then + sha1=$(clear_local_git_env; cd "$path" && git rev-parse --verify HEAD) + set_name_rev "$path" "$sha1" + fi + say "+$sha1 $displaypath$revname" + fi + + if test -n "$recursive" + then + ( + prefix="$displaypath/" + clear_local_git_env + cd "$path" && + eval cmd_status "$orig_args" + ) || + die "Failed to recurse into submodule path '$path'" + fi + done +} +# +# Sync remote urls for submodules +# This makes the value for remote.$remote.url match the value +# specified in .gitmodules. +# +cmd_sync() +{ + while test $# -ne 0 + do + case "$1" in + -q|--quiet) + GIT_QUIET=1 + shift + ;; + --) + shift + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + done + cd_to_toplevel + module_list "$@" | + while read mode sha1 stage path + do + name=$(module_name "$path") + url=$(git config -f .gitmodules --get submodule."$name".url) + + # Possibly a url relative to parent + case "$url" in + ./*|../*) + url=$(resolve_relative_url "$url") || exit + ;; + esac + + say "Synchronizing submodule url for '$name'" + git config submodule."$name".url "$url" + + if test -e "$path"/.git + then + ( + clear_local_git_env + cd "$path" + remote=$(get_default_remote) + git config remote."$remote".url "$url" + ) + fi + done +} + +# This loop parses the command line arguments to find the +# subcommand name to dispatch. Parsing of the subcommand specific +# options are primarily done by the subcommand implementations. +# Subcommand specific options such as --branch and --cached are +# parsed here as well, for backward compatibility. 
+ +while test $# != 0 && test -z "$command" +do + case "$1" in + add | foreach | init | update | status | summary | sync) + command=$1 + ;; + -q|--quiet) + GIT_QUIET=1 + ;; + -b|--branch) + case "$2" in + '') + usage + ;; + esac + branch="$2"; shift + ;; + --cached) + cached="$1" + ;; + --) + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift +done + +# No command word defaults to "status" +test -n "$command" || command=status + +# "-b branch" is accepted only by "add" +if test -n "$branch" && test "$command" != add +then + usage +fi + +# "--cached" is accepted only by "status" and "summary" +if test -n "$cached" && test "$command" != status -a "$command" != summary +then + usage +fi + +"cmd_$command" "$@" diff --git a/SparkleShare/Mac/git/libexec/git-core/git-svn b/SparkleShare/Mac/git/libexec/git-core/git-svn new file mode 100755 index 00000000..557dc935 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-svn @@ -0,0 +1,6254 @@ +#!/usr/bin/perl +use lib (split(/:/, $ENV{GITPERLLIB} || "/usr/local/git/lib/perl5/site_perl")); +# Copyright (C) 2006, Eric Wong +# License: GPL v2 or later +use 5.008; +use warnings; +use strict; +use vars qw/ $AUTHOR $VERSION + $sha1 $sha1_short $_revision $_repository + $_q $_authors $_authors_prog %users/; +$AUTHOR = 'Eric Wong '; +$VERSION = '1.7.4'; + +# From which subdir have we been invoked? +my $cmd_dir_prefix = eval { + command_oneline([qw/rev-parse --show-prefix/], STDERR => 0) +} || ''; + +my $git_dir_user_set = 1 if defined $ENV{GIT_DIR}; +$ENV{GIT_DIR} ||= '.git'; +$Git::SVN::default_repo_id = 'svn'; +$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn'; +$Git::SVN::Ra::_log_window_size = 100; +$Git::SVN::_minimize_url = 'unset'; + +if (! exists $ENV{SVN_SSH}) { + if (exists $ENV{GIT_SSH}) { + $ENV{SVN_SSH} = $ENV{GIT_SSH}; + if ($^O eq 'msys') { + $ENV{SVN_SSH} =~ s/\\/\\\\/g; + $ENV{SVN_SSH} =~ s/(.*)/"$1"/; + } + } +} + +$Git::SVN::Log::TZ = $ENV{TZ}; +$ENV{TZ} = 'UTC'; +$| = 1; # unbuffer STDOUT + +sub fatal (@) { print STDERR "@_\n"; exit 1 } +sub _req_svn { + require SVN::Core; # use()-ing this causes segfaults for me... 
*shrug* + require SVN::Ra; + require SVN::Delta; + if ($SVN::Core::VERSION lt '1.1.0') { + fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)"; + } +} +my $can_compress = eval { require Compress::Zlib; 1}; +push @Git::SVN::Ra::ISA, 'SVN::Ra'; +push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor'; +push @SVN::Git::Fetcher::ISA, 'SVN::Delta::Editor'; +use Carp qw/croak/; +use Digest::MD5; +use IO::File qw//; +use File::Basename qw/dirname basename/; +use File::Path qw/mkpath/; +use File::Spec; +use File::Find; +use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/; +use IPC::Open3; +use Git; + +BEGIN { + # import functions from Git into our packages, en masse + no strict 'refs'; + foreach (qw/command command_oneline command_noisy command_output_pipe + command_input_pipe command_close_pipe + command_bidi_pipe command_close_bidi_pipe/) { + for my $package ( qw(SVN::Git::Editor SVN::Git::Fetcher + Git::SVN::Migration Git::SVN::Log Git::SVN), + __PACKAGE__) { + *{"${package}::$_"} = \&{"Git::$_"}; + } + } +} + +my ($SVN); + +$sha1 = qr/[a-f\d]{40}/; +$sha1_short = qr/[a-f\d]{4,40}/; +my ($_stdin, $_help, $_edit, + $_message, $_file, $_branch_dest, + $_template, $_shared, + $_version, $_fetch_all, $_no_rebase, $_fetch_parent, + $_merge, $_strategy, $_dry_run, $_local, + $_prefix, $_no_checkout, $_url, $_verbose, + $_git_format, $_commit_url, $_tag, $_merge_info); +$Git::SVN::_follow_parent = 1; +$_q ||= 0; +my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username, + 'config-dir=s' => \$Git::SVN::Ra::config_dir, + 'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache, + 'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex ); +my %fc_opts = ( 'follow-parent|follow!' => \$Git::SVN::_follow_parent, + 'authors-file|A=s' => \$_authors, + 'authors-prog=s' => \$_authors_prog, + 'repack:i' => \$Git::SVN::_repack, + 'noMetadata' => \$Git::SVN::_no_metadata, + 'useSvmProps' => \$Git::SVN::_use_svm_props, + 'useSvnsyncProps' => \$Git::SVN::_use_svnsync_props, + 'log-window-size=i' => \$Git::SVN::Ra::_log_window_size, + 'no-checkout' => \$_no_checkout, + 'quiet|q+' => \$_q, + 'repack-flags|repack-args|repack-opts=s' => + \$Git::SVN::_repack_flags, + 'use-log-author' => \$Git::SVN::_use_log_author, + 'add-author-from' => \$Git::SVN::_add_author_from, + 'localtime' => \$Git::SVN::_localtime, + %remote_opts ); + +my ($_trunk, @_tags, @_branches, $_stdlayout); +my %icv; +my %init_opts = ( 'template=s' => \$_template, 'shared:s' => \$_shared, + 'trunk|T=s' => \$_trunk, 'tags|t=s@' => \@_tags, + 'branches|b=s@' => \@_branches, 'prefix=s' => \$_prefix, + 'stdlayout|s' => \$_stdlayout, + 'minimize-url|m!' 
=> \$Git::SVN::_minimize_url, + 'no-metadata' => sub { $icv{noMetadata} = 1 }, + 'use-svm-props' => sub { $icv{useSvmProps} = 1 }, + 'use-svnsync-props' => sub { $icv{useSvnsyncProps} = 1 }, + 'rewrite-root=s' => sub { $icv{rewriteRoot} = $_[1] }, + 'rewrite-uuid=s' => sub { $icv{rewriteUUID} = $_[1] }, + %remote_opts ); +my %cmt_opts = ( 'edit|e' => \$_edit, + 'rmdir' => \$SVN::Git::Editor::_rmdir, + 'find-copies-harder' => \$SVN::Git::Editor::_find_copies_harder, + 'l=i' => \$SVN::Git::Editor::_rename_limit, + 'copy-similarity|C=i'=> \$SVN::Git::Editor::_cp_similarity +); + +my %cmd = ( + fetch => [ \&cmd_fetch, "Download new revisions from SVN", + { 'revision|r=s' => \$_revision, + 'fetch-all|all' => \$_fetch_all, + 'parent|p' => \$_fetch_parent, + %fc_opts } ], + clone => [ \&cmd_clone, "Initialize and fetch revisions", + { 'revision|r=s' => \$_revision, + %fc_opts, %init_opts } ], + init => [ \&cmd_init, "Initialize a repo for tracking" . + " (requires URL argument)", + \%init_opts ], + 'multi-init' => [ \&cmd_multi_init, + "Deprecated alias for ". + "'$0 init -T -b -t'", + \%init_opts ], + dcommit => [ \&cmd_dcommit, + 'Commit several diffs to merge with upstream', + { 'merge|m|M' => \$_merge, + 'strategy|s=s' => \$_strategy, + 'verbose|v' => \$_verbose, + 'dry-run|n' => \$_dry_run, + 'fetch-all|all' => \$_fetch_all, + 'commit-url=s' => \$_commit_url, + 'revision|r=i' => \$_revision, + 'no-rebase' => \$_no_rebase, + 'mergeinfo=s' => \$_merge_info, + %cmt_opts, %fc_opts } ], + branch => [ \&cmd_branch, + 'Create a branch in the SVN repository', + { 'message|m=s' => \$_message, + 'destination|d=s' => \$_branch_dest, + 'dry-run|n' => \$_dry_run, + 'tag|t' => \$_tag, + 'username=s' => \$Git::SVN::Prompt::_username, + 'commit-url=s' => \$_commit_url } ], + tag => [ sub { $_tag = 1; cmd_branch(@_) }, + 'Create a tag in the SVN repository', + { 'message|m=s' => \$_message, + 'destination|d=s' => \$_branch_dest, + 'dry-run|n' => \$_dry_run, + 'username=s' => \$Git::SVN::Prompt::_username, + 'commit-url=s' => \$_commit_url } ], + 'set-tree' => [ \&cmd_set_tree, + "Set an SVN repository to a git tree-ish", + { 'stdin' => \$_stdin, %cmt_opts, %fc_opts, } ], + 'create-ignore' => [ \&cmd_create_ignore, + 'Create a .gitignore per svn:ignore', + { 'revision|r=i' => \$_revision + } ], + 'mkdirs' => [ \&cmd_mkdirs , + "recreate empty directories after a checkout", + { 'revision|r=i' => \$_revision } ], + 'propget' => [ \&cmd_propget, + 'Print the value of a property on a file or directory', + { 'revision|r=i' => \$_revision } ], + 'proplist' => [ \&cmd_proplist, + 'List all properties of a file or directory', + { 'revision|r=i' => \$_revision } ], + 'show-ignore' => [ \&cmd_show_ignore, "Show svn:ignore listings", + { 'revision|r=i' => \$_revision + } ], + 'show-externals' => [ \&cmd_show_externals, "Show svn:externals listings", + { 'revision|r=i' => \$_revision + } ], + 'multi-fetch' => [ \&cmd_multi_fetch, + "Deprecated alias for $0 fetch --all", + { 'revision|r=s' => \$_revision, %fc_opts } ], + 'migrate' => [ sub { }, + # no-op, we automatically run this anyways, + 'Migrate configuration/metadata/layout from + previous versions of git-svn', + { 'minimize' => \$Git::SVN::Migration::_minimize, + %remote_opts } ], + 'log' => [ \&Git::SVN::Log::cmd_show_log, 'Show commit logs', + { 'limit=i' => \$Git::SVN::Log::limit, + 'revision|r=s' => \$_revision, + 'verbose|v' => \$Git::SVN::Log::verbose, + 'incremental' => \$Git::SVN::Log::incremental, + 'oneline' => \$Git::SVN::Log::oneline, + 'show-commit' => 
\$Git::SVN::Log::show_commit, + 'non-recursive' => \$Git::SVN::Log::non_recursive, + 'authors-file|A=s' => \$_authors, + 'color' => \$Git::SVN::Log::color, + 'pager=s' => \$Git::SVN::Log::pager + } ], + 'find-rev' => [ \&cmd_find_rev, + "Translate between SVN revision numbers and tree-ish", + {} ], + 'rebase' => [ \&cmd_rebase, "Fetch and rebase your working directory", + { 'merge|m|M' => \$_merge, + 'verbose|v' => \$_verbose, + 'strategy|s=s' => \$_strategy, + 'local|l' => \$_local, + 'fetch-all|all' => \$_fetch_all, + 'dry-run|n' => \$_dry_run, + %fc_opts } ], + 'commit-diff' => [ \&cmd_commit_diff, + 'Commit a diff between two trees', + { 'message|m=s' => \$_message, + 'file|F=s' => \$_file, + 'revision|r=s' => \$_revision, + %cmt_opts } ], + 'info' => [ \&cmd_info, + "Show info about the latest SVN revision + on the current branch", + { 'url' => \$_url, } ], + 'blame' => [ \&Git::SVN::Log::cmd_blame, + "Show what revision and author last modified each line of a file", + { 'git-format' => \$_git_format } ], + 'reset' => [ \&cmd_reset, + "Undo fetches back to the specified SVN revision", + { 'revision|r=s' => \$_revision, + 'parent|p' => \$_fetch_parent } ], + 'gc' => [ \&cmd_gc, + "Compress unhandled.log files in .git/svn and remove " . + "index files in .git/svn", + {} ], +); + +my $cmd; +for (my $i = 0; $i < @ARGV; $i++) { + if (defined $cmd{$ARGV[$i]}) { + $cmd = $ARGV[$i]; + splice @ARGV, $i, 1; + last; + } elsif ($ARGV[$i] eq 'help') { + $cmd = $ARGV[$i+1]; + usage(0); + } +}; + +# make sure we're always running at the top-level working directory +unless ($cmd && $cmd =~ /(?:clone|init|multi-init)$/) { + unless (-d $ENV{GIT_DIR}) { + if ($git_dir_user_set) { + die "GIT_DIR=$ENV{GIT_DIR} explicitly set, ", + "but it is not a directory\n"; + } + my $git_dir = delete $ENV{GIT_DIR}; + my $cdup = undef; + git_cmd_try { + $cdup = command_oneline(qw/rev-parse --show-cdup/); + $git_dir = '.' unless ($cdup); + chomp $cdup if ($cdup); + $cdup = "." unless ($cdup && length $cdup); + } "Already at toplevel, but $git_dir not found\n"; + chdir $cdup or die "Unable to chdir up to '$cdup'\n"; + unless (-d $git_dir) { + die "$git_dir still not found after going to ", + "'$cdup'\n"; + } + $ENV{GIT_DIR} = $git_dir; + } + $_repository = Git->repository(Repository => $ENV{GIT_DIR}); +} + +my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd); + +read_git_config(\%opts); +if ($cmd && ($cmd eq 'log' || $cmd eq 'blame')) { + Getopt::Long::Configure('pass_through'); +} +my $rv = GetOptions(%opts, 'help|H|h' => \$_help, 'version|V' => \$_version, + 'minimize-connections' => \$Git::SVN::Migration::_minimize, + 'id|i=s' => \$Git::SVN::default_ref_id, + 'svn-remote|remote|R=s' => sub { + $Git::SVN::no_reuse_existing = 1; + $Git::SVN::default_repo_id = $_[1] }); +exit 1 if (!$rv && $cmd && $cmd ne 'log'); + +usage(0) if $_help; +version() if $_version; +usage(1) unless defined $cmd; +load_authors() if $_authors; +if (defined $_authors_prog) { + $_authors_prog = "'" . File::Spec->rel2abs($_authors_prog) . "'"; +} + +unless ($cmd =~ /^(?:clone|init|multi-init|commit-diff)$/) { + Git::SVN::Migration::migration_check(); +} +Git::SVN::init_vars(); +eval { + Git::SVN::verify_remotes_sanity(); + $cmd{$cmd}->[0]->(@ARGV); +}; +fatal $@ if $@; +post_fetch_checkout(); +exit 0; + +####################### primary functions ###################### +sub usage { + my $exit = shift || 0; + my $fd = $exit ? 
\*STDERR : \*STDOUT; + print $fd <<""; +git-svn - bidirectional operations between a single Subversion tree and git +Usage: git svn [options] [arguments]\n + + print $fd "Available commands:\n" unless $cmd; + + foreach (sort keys %cmd) { + next if $cmd && $cmd ne $_; + next if /^multi-/; # don't show deprecated commands + print $fd ' ',pack('A17',$_),$cmd{$_}->[1],"\n"; + foreach (sort keys %{$cmd{$_}->[2]}) { + # mixed-case options are for .git/config only + next if /[A-Z]/ && /^[a-z]+$/i; + # prints out arguments as they should be passed: + my $x = s#[:=]s$## ? '' : s#[:=]i$## ? '' : ''; + print $fd ' ' x 21, join(', ', map { length $_ > 1 ? + "--$_" : "-$_" } + split /\|/,$_)," $x\n"; + } + } + print $fd <<""; +\nGIT_SVN_ID may be set in the environment or via the --id/-i switch to an +arbitrary identifier if you're tracking multiple SVN branches/repositories in +one git repository and want to keep them separate. See git-svn(1) for more +information. + + exit $exit; +} + +sub version { + ::_req_svn(); + print "git-svn version $VERSION (svn $SVN::Core::VERSION)\n"; + exit 0; +} + +sub do_git_init_db { + unless (-d $ENV{GIT_DIR}) { + my @init_db = ('init'); + push @init_db, "--template=$_template" if defined $_template; + if (defined $_shared) { + if ($_shared =~ /[a-z]/) { + push @init_db, "--shared=$_shared"; + } else { + push @init_db, "--shared"; + } + } + command_noisy(@init_db); + $_repository = Git->repository(Repository => ".git"); + } + my $set; + my $pfx = "svn-remote.$Git::SVN::default_repo_id"; + foreach my $i (keys %icv) { + die "'$set' and '$i' cannot both be set\n" if $set; + next unless defined $icv{$i}; + command_noisy('config', "$pfx.$i", $icv{$i}); + $set = $i; + } + my $ignore_regex = \$SVN::Git::Fetcher::_ignore_regex; + command_noisy('config', "$pfx.ignore-paths", $$ignore_regex) + if defined $$ignore_regex; +} + +sub init_subdir { + my $repo_path = shift or return; + mkpath([$repo_path]) unless -d $repo_path; + chdir $repo_path or die "Couldn't chdir to $repo_path: $!\n"; + $ENV{GIT_DIR} = '.git'; + $_repository = Git->repository(Repository => $ENV{GIT_DIR}); +} + +sub cmd_clone { + my ($url, $path) = @_; + if (!defined $path && + (defined $_trunk || @_branches || @_tags || + defined $_stdlayout) && + $url !~ m#^[a-z\+]+://#) { + $path = $url; + } + $path = basename($url) if !defined $path || !length $path; + my $authors_absolute = $_authors ? File::Spec->rel2abs($_authors) : ""; + cmd_init($url, $path); + command_oneline('config', 'svn.authorsfile', $authors_absolute) + if $_authors; + Git::SVN::fetch_all($Git::SVN::default_repo_id); +} + +sub cmd_init { + if (defined $_stdlayout) { + $_trunk = 'trunk' if (!defined $_trunk); + @_tags = 'tags' if (! @_tags); + @_branches = 'branches' if (! 
@_branches); + } + if (defined $_trunk || @_branches || @_tags) { + return cmd_multi_init(@_); + } + my $url = shift or die "SVN repository location required ", + "as a command-line argument\n"; + $url = canonicalize_url($url); + init_subdir(@_); + do_git_init_db(); + + if ($Git::SVN::_minimize_url eq 'unset') { + $Git::SVN::_minimize_url = 0; + } + + Git::SVN->init($url); +} + +sub cmd_fetch { + if (grep /^\d+=./, @_) { + die "'=' fetch arguments are ", + "no longer supported.\n"; + } + my ($remote) = @_; + if (@_ > 1) { + die "Usage: $0 fetch [--all] [--parent] [svn-remote]\n"; + } + $Git::SVN::no_reuse_existing = undef; + if ($_fetch_parent) { + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + unless ($gs) { + die "Unable to determine upstream SVN information from ", + "working tree history\n"; + } + # just fetch, don't checkout. + $_no_checkout = 'true'; + $_fetch_all ? $gs->fetch_all : $gs->fetch; + } elsif ($_fetch_all) { + cmd_multi_fetch(); + } else { + $remote ||= $Git::SVN::default_repo_id; + Git::SVN::fetch_all($remote, Git::SVN::read_all_remotes()); + } +} + +sub cmd_set_tree { + my (@commits) = @_; + if ($_stdin || !@commits) { + print "Reading from stdin...\n"; + @commits = (); + while () { + if (/\b($sha1_short)\b/o) { + unshift @commits, $1; + } + } + } + my @revs; + foreach my $c (@commits) { + my @tmp = command('rev-parse',$c); + if (scalar @tmp == 1) { + push @revs, $tmp[0]; + } elsif (scalar @tmp > 1) { + push @revs, reverse(command('rev-list',@tmp)); + } else { + fatal "Failed to rev-parse $c"; + } + } + my $gs = Git::SVN->new; + my ($r_last, $cmt_last) = $gs->last_rev_commit; + $gs->fetch; + if (defined $gs->{last_rev} && $r_last != $gs->{last_rev}) { + fatal "There are new revisions that were fetched ", + "and need to be merged (or acknowledged) ", + "before committing.\nlast rev: $r_last\n", + " current: $gs->{last_rev}"; + } + $gs->set_tree($_) foreach @revs; + print "Done committing ",scalar @revs," revisions to SVN\n"; + unlink $gs->{index}; +} + +sub cmd_dcommit { + my $head = shift; + command_noisy(qw/update-index --refresh/); + git_cmd_try { command_oneline(qw/diff-index --quiet HEAD/) } + 'Cannot dcommit with a dirty index. Commit your changes first, ' + . "or stash them with `git stash'.\n"; + $head ||= 'HEAD'; + + my $old_head; + if ($head ne 'HEAD') { + $old_head = eval { + command_oneline([qw/symbolic-ref -q HEAD/]) + }; + if ($old_head) { + $old_head =~ s{^refs/heads/}{}; + } else { + $old_head = eval { command_oneline(qw/rev-parse HEAD/) }; + } + command(['checkout', $head], STDERR => 0); + } + + my @refs; + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD', \@refs); + unless ($gs) { + die "Unable to determine upstream SVN information from ", + "$head history.\nPerhaps the repository is empty."; + } + + if (defined $_commit_url) { + $url = $_commit_url; + } else { + $url = eval { command_oneline('config', '--get', + "svn-remote.$gs->{repo_id}.commiturl") }; + if (!$url) { + $url = $gs->full_url + } + } + + my $last_rev = $_revision if defined $_revision; + if ($url) { + print "Committing to $url ...\n"; + } + my ($linear_refs, $parents) = linearize_history($gs, \@refs); + if ($_no_rebase && scalar(@$linear_refs) > 1) { + warn "Attempting to commit more than one change while ", + "--no-rebase is enabled.\n", + "If these changes depend on each other, re-running ", + "without --no-rebase may be required." 
+ } + my $expect_url = $url; + Git::SVN::remove_username($expect_url); + while (1) { + my $d = shift @$linear_refs or last; + unless (defined $last_rev) { + (undef, $last_rev, undef) = cmt_metadata("$d~1"); + unless (defined $last_rev) { + fatal "Unable to extract revision information ", + "from commit $d~1"; + } + } + if ($_dry_run) { + print "diff-tree $d~1 $d\n"; + } else { + my $cmt_rev; + my %ed_opts = ( r => $last_rev, + log => get_commit_entry($d)->{log}, + ra => Git::SVN::Ra->new($url), + config => SVN::Core::config_get_config( + $Git::SVN::Ra::config_dir + ), + tree_a => "$d~1", + tree_b => $d, + editor_cb => sub { + print "Committed r$_[0]\n"; + $cmt_rev = $_[0]; + }, + mergeinfo => $_merge_info, + svn_path => ''); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\n$d~1 == $d\n"; + } elsif ($parents->{$d} && @{$parents->{$d}}) { + $gs->{inject_parents_dcommit}->{$cmt_rev} = + $parents->{$d}; + } + $_fetch_all ? $gs->fetch_all : $gs->fetch; + $last_rev = $cmt_rev; + next if $_no_rebase; + + # we always want to rebase against the current HEAD, + # not any head that was passed to us + my @diff = command('diff-tree', $d, + $gs->refname, '--'); + my @finish; + if (@diff) { + @finish = rebase_cmd(); + print STDERR "W: $d and ", $gs->refname, + " differ, using @finish:\n", + join("\n", @diff), "\n"; + } else { + print "No changes between current HEAD and ", + $gs->refname, + "\nResetting to the latest ", + $gs->refname, "\n"; + @finish = qw/reset --mixed/; + } + command_noisy(@finish, $gs->refname); + if (@diff) { + @refs = (); + my ($url_, $rev_, $uuid_, $gs_) = + working_head_info('HEAD', \@refs); + my ($linear_refs_, $parents_) = + linearize_history($gs_, \@refs); + if (scalar(@$linear_refs) != + scalar(@$linear_refs_)) { + fatal "# of revisions changed ", + "\nbefore:\n", + join("\n", @$linear_refs), + "\n\nafter:\n", + join("\n", @$linear_refs_), "\n", + 'If you are attempting to commit ', + "merges, try running:\n\t", + 'git rebase --interactive', + '--preserve-merges ', + $gs->refname, + "\nBefore dcommitting"; + } + if ($url_ ne $expect_url) { + if ($url_ eq $gs->metadata_url) { + print + "Accepting rewritten URL:", + " $url_\n"; + } else { + fatal + "URL mismatch after rebase:", + " $url_ != $expect_url"; + } + } + if ($uuid_ ne $uuid) { + fatal "uuid mismatch after rebase: ", + "$uuid_ != $uuid"; + } + # remap parents + my (%p, @l, $i); + for ($i = 0; $i < scalar @$linear_refs; $i++) { + my $new = $linear_refs_->[$i] or next; + $p{$new} = + $parents->{$linear_refs->[$i]}; + push @l, $new; + } + $parents = \%p; + $linear_refs = \@l; + } + } + } + + if ($old_head) { + my $new_head = command_oneline(qw/rev-parse HEAD/); + my $new_is_symbolic = eval { + command_oneline(qw/symbolic-ref -q HEAD/); + }; + if ($new_is_symbolic) { + print "dcommitted the branch ", $head, "\n"; + } else { + print "dcommitted on a detached HEAD because you gave ", + "a revision argument.\n", + "The rewritten commit is: ", $new_head, "\n"; + } + command(['checkout', $old_head], STDERR => 0); + } + + unlink $gs->{index}; +} + +sub cmd_branch { + my ($branch_name, $head) = @_; + + unless (defined $branch_name && length $branch_name) { + die(($_tag ? "tag" : "branch") . " name required\n"); + } + $head ||= 'HEAD'; + + my (undef, $rev, undef, $gs) = working_head_info($head); + my $src = $gs->full_url; + + my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}}; + my $allglobs = $remote->{ $_tag ? 
'tags' : 'branches' }; + my $glob; + if ($#{$allglobs} == 0) { + $glob = $allglobs->[0]; + } else { + unless(defined $_branch_dest) { + die "Multiple ", + $_tag ? "tag" : "branch", + " paths defined for Subversion repository.\n", + "You must specify where you want to create the ", + $_tag ? "tag" : "branch", + " with the --destination argument.\n"; + } + foreach my $g (@{$allglobs}) { + # SVN::Git::Editor could probably be moved to Git.pm.. + my $re = SVN::Git::Editor::glob2pat($g->{path}->{left}); + if ($_branch_dest =~ /$re/) { + $glob = $g; + last; + } + } + unless (defined $glob) { + my $dest_re = qr/\b\Q$_branch_dest\E\b/; + foreach my $g (@{$allglobs}) { + $g->{path}->{left} =~ /$dest_re/ or next; + if (defined $glob) { + die "Ambiguous destination: ", + $_branch_dest, "\nmatches both '", + $glob->{path}->{left}, "' and '", + $g->{path}->{left}, "'\n"; + } + $glob = $g; + } + unless (defined $glob) { + die "Unknown ", + $_tag ? "tag" : "branch", + " destination $_branch_dest\n"; + } + } + } + my ($lft, $rgt) = @{ $glob->{path} }{qw/left right/}; + my $url; + if (defined $_commit_url) { + $url = $_commit_url; + } else { + $url = eval { command_oneline('config', '--get', + "svn-remote.$gs->{repo_id}.commiturl") }; + if (!$url) { + $url = $remote->{url}; + } + } + my $dst = join '/', $url, $lft, $branch_name, ($rgt || ()); + + if ($dst =~ /^https:/ && $src =~ /^http:/) { + $src=~s/^http:/https:/; + } + + ::_req_svn(); + + my $ctx = SVN::Client->new( + auth => Git::SVN::Ra::_auth_providers(), + log_msg => sub { + ${ $_[0] } = defined $_message + ? $_message + : 'Create ' . ($_tag ? 'tag ' : 'branch ' ) + . $branch_name; + }, + ); + + eval { + $ctx->ls($dst, 'HEAD', 0); + } and die "branch ${branch_name} already exists\n"; + + print "Copying ${src} at r${rev} to ${dst}...\n"; + $ctx->copy($src, $rev, $dst) + unless $_dry_run; + + $gs->fetch_all; +} + +sub cmd_find_rev { + my $revision_or_hash = shift or die "SVN or git revision required ", + "as a command-line argument\n"; + my $result; + if ($revision_or_hash =~ /^r\d+$/) { + my $head = shift; + $head ||= 'HEAD'; + my @refs; + my (undef, undef, $uuid, $gs) = working_head_info($head, \@refs); + unless ($gs) { + die "Unable to determine upstream SVN information from ", + "$head history\n"; + } + my $desired_revision = substr($revision_or_hash, 1); + $result = $gs->rev_map_get($desired_revision, $uuid); + } else { + my (undef, $rev, undef) = cmt_metadata($revision_or_hash); + $result = $rev; + } + print "$result\n" if $result; +} + +sub cmd_rebase { + command_noisy(qw/update-index --refresh/); + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + unless ($gs) { + die "Unable to determine upstream SVN information from ", + "working tree history\n"; + } + if ($_dry_run) { + print "Remote Branch: " . $gs->refname . "\n"; + print "SVN URL: " . $url . "\n"; + return; + } + if (command(qw/diff-index HEAD --/)) { + print STDERR "Cannot rebase with uncommited changes:\n"; + command_noisy('status'); + exit 1; + } + unless ($_local) { + # rebase will checkout for us, so no need to do it explicitly + $_no_checkout = 'true'; + $_fetch_all ? $gs->fetch_all : $gs->fetch; + } + command_noisy(rebase_cmd(), $gs->refname); + $gs->mkemptydirs; +} + +sub cmd_show_ignore { + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + $gs ||= Git::SVN->new; + my $r = (defined $_revision ? 
$_revision : $gs->ra->get_latest_revnum); + $gs->prop_walk($gs->{path}, $r, sub { + my ($gs, $path, $props) = @_; + print STDOUT "\n# $path\n"; + my $s = $props->{'svn:ignore'} or return; + $s =~ s/[\r\n]+/\n/g; + $s =~ s/^\n+//; + chomp $s; + $s =~ s#^#$path#gm; + print STDOUT "$s\n"; + }); +} + +sub cmd_show_externals { + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + $gs ||= Git::SVN->new; + my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum); + $gs->prop_walk($gs->{path}, $r, sub { + my ($gs, $path, $props) = @_; + print STDOUT "\n# $path\n"; + my $s = $props->{'svn:externals'} or return; + $s =~ s/[\r\n]+/\n/g; + chomp $s; + $s =~ s#^#$path#gm; + print STDOUT "$s\n"; + }); +} + +sub cmd_create_ignore { + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + $gs ||= Git::SVN->new; + my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum); + $gs->prop_walk($gs->{path}, $r, sub { + my ($gs, $path, $props) = @_; + # $path is of the form /path/to/dir/ + $path = '.' . $path; + # SVN can have attributes on empty directories, + # which git won't track + mkpath([$path]) unless -d $path; + my $ignore = $path . '.gitignore'; + my $s = $props->{'svn:ignore'} or return; + open(GITIGNORE, '>', $ignore) + or fatal("Failed to open `$ignore' for writing: $!"); + $s =~ s/[\r\n]+/\n/g; + $s =~ s/^\n+//; + chomp $s; + # Prefix all patterns so that the ignore doesn't apply + # to sub-directories. + $s =~ s#^#/#gm; + print GITIGNORE "$s\n"; + close(GITIGNORE) + or fatal("Failed to close `$ignore': $!"); + command_noisy('add', '-f', $ignore); + }); +} + +sub cmd_mkdirs { + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + $gs ||= Git::SVN->new; + $gs->mkemptydirs($_revision); +} + +sub canonicalize_path { + my ($path) = @_; + my $dot_slash_added = 0; + if (substr($path, 0, 1) ne "/") { + $path = "./" . $path; + $dot_slash_added = 1; + } + # File::Spec->canonpath doesn't collapse x/../y into y (for a + # good reason), so let's do this manually. + $path =~ s#/+#/#g; + $path =~ s#/\.(?:/|$)#/#g; + $path =~ s#/[^/]+/\.\.##g; + $path =~ s#/$##g; + $path =~ s#^\./## if $dot_slash_added; + $path =~ s#^/##; + $path =~ s#^\.$##; + return $path; +} + +sub canonicalize_url { + my ($url) = @_; + $url =~ s#^([^:]+://[^/]*/)(.*)$#$1 . canonicalize_path($2)#e; + return $url; +} + +# get_svnprops(PATH) +# ------------------ +# Helper for cmd_propget and cmd_proplist below. +sub get_svnprops { + my $path = shift; + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + $gs ||= Git::SVN->new; + + # prefix THE PATH by the sub-directory from which the user + # invoked us. + $path = $cmd_dir_prefix . $path; + fatal("No such file or directory: $path") unless -e $path; + my $is_dir = -d $path ? 1 : 0; + $path = $gs->{path} . '/' . $path; + + # canonicalize the path (otherwise libsvn will abort or fail to + # find the file) + $path = canonicalize_path($path); + + my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum); + my $props; + if ($is_dir) { + (undef, undef, $props) = $gs->ra->get_dir($path, $r); + } + else { + (undef, $props) = $gs->ra->get_file($path, $r, undef); + } + return $props; +} + +# cmd_propget (PROP, PATH) +# ------------------------ +# Print the SVN property PROP for PATH. +sub cmd_propget { + my ($prop, $path) = @_; + $path = '.' 
if not defined $path; + usage(1) if not defined $prop; + my $props = get_svnprops($path); + if (not defined $props->{$prop}) { + fatal("`$path' does not have a `$prop' SVN property."); + } + print $props->{$prop} . "\n"; +} + +# cmd_proplist (PATH) +# ------------------- +# Print the list of SVN properties for PATH. +sub cmd_proplist { + my $path = shift; + $path = '.' if not defined $path; + my $props = get_svnprops($path); + print "Properties on '$path':\n"; + foreach (sort keys %{$props}) { + print " $_\n"; + } +} + +sub cmd_multi_init { + my $url = shift; + unless (defined $_trunk || @_branches || @_tags) { + usage(1); + } + + $_prefix = '' unless defined $_prefix; + if (defined $url) { + $url = canonicalize_url($url); + init_subdir(@_); + } + do_git_init_db(); + if (defined $_trunk) { + $_trunk =~ s#^/+##; + my $trunk_ref = 'refs/remotes/' . $_prefix . 'trunk'; + # try both old-style and new-style lookups: + my $gs_trunk = eval { Git::SVN->new($trunk_ref) }; + unless ($gs_trunk) { + my ($trunk_url, $trunk_path) = + complete_svn_url($url, $_trunk); + $gs_trunk = Git::SVN->init($trunk_url, $trunk_path, + undef, $trunk_ref); + } + } + return unless @_branches || @_tags; + my $ra = $url ? Git::SVN::Ra->new($url) : undef; + foreach my $path (@_branches) { + complete_url_ls_init($ra, $path, '--branches/-b', $_prefix); + } + foreach my $path (@_tags) { + complete_url_ls_init($ra, $path, '--tags/-t', $_prefix.'tags/'); + } +} + +sub cmd_multi_fetch { + $Git::SVN::no_reuse_existing = undef; + my $remotes = Git::SVN::read_all_remotes(); + foreach my $repo_id (sort keys %$remotes) { + if ($remotes->{$repo_id}->{url}) { + Git::SVN::fetch_all($repo_id, $remotes); + } + } +} + +# this command is special because it requires no metadata +sub cmd_commit_diff { + my ($ta, $tb, $url) = @_; + my $usage = "Usage: $0 commit-diff -r ". + " []"; + fatal($usage) if (!defined $ta || !defined $tb); + my $svn_path = ''; + if (!defined $url) { + my $gs = eval { Git::SVN->new }; + if (!$gs) { + fatal("Needed URL or usable git-svn --id in ", + "the command-line\n", $usage); + } + $url = $gs->{url}; + $svn_path = $gs->{path}; + } + unless (defined $_revision) { + fatal("-r|--revision is a required argument\n", $usage); + } + if (defined $_message && defined $_file) { + fatal("Both --message/-m and --file/-F specified ", + "for the commit message.\n", + "I have no idea what you mean"); + } + if (defined $_file) { + $_message = file_to_s($_file); + } else { + $_message ||= get_commit_entry($tb)->{log}; + } + my $ra ||= Git::SVN::Ra->new($url); + my $r = $_revision; + if ($r eq 'HEAD') { + $r = $ra->get_latest_revnum; + } elsif ($r !~ /^\d+$/) { + die "revision argument: $r not understood by git-svn\n"; + } + my %ed_opts = ( r => $r, + log => $_message, + ra => $ra, + tree_a => $ta, + tree_b => $tb, + editor_cb => sub { print "Committed r$_[0]\n" }, + svn_path => $svn_path ); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\n$ta == $tb\n"; + } +} + +sub escape_uri_only { + my ($uri) = @_; + my @tmp; + foreach (split m{/}, $uri) { + s/([^~\w.%+-]|%(?![a-fA-F0-9]{2}))/sprintf("%%%02X",ord($1))/eg; + push @tmp, $_; + } + join('/', @tmp); +} + +sub escape_url { + my ($url) = @_; + if ($url =~ m#^([^:]+)://([^/]*)(.*)$#) { + my ($scheme, $domain, $uri) = ($1, $2, escape_uri_only($3)); + $url = "$scheme://$domain$uri"; + } + $url; +} + +sub cmd_info { + my $path = canonicalize_path(defined($_[0]) ? $_[0] : "."); + my $fullpath = canonicalize_path($cmd_dir_prefix . 
$path); + if (exists $_[1]) { + die "Too many arguments specified\n"; + } + + my ($file_type, $diff_status) = find_file_type_and_diff_status($path); + + if (!$file_type && !$diff_status) { + print STDERR "svn: '$path' is not under version control\n"; + exit 1; + } + + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + unless ($gs) { + die "Unable to determine upstream SVN information from ", + "working tree history\n"; + } + + # canonicalize_path() will return "" to make libsvn 1.5.x happy, + $path = "." if $path eq ""; + + my $full_url = $url . ($fullpath eq "" ? "" : "/$fullpath"); + + if ($_url) { + print escape_url($full_url), "\n"; + return; + } + + my $result = "Path: $path\n"; + $result .= "Name: " . basename($path) . "\n" if $file_type ne "dir"; + $result .= "URL: " . escape_url($full_url) . "\n"; + + eval { + my $repos_root = $gs->repos_root; + Git::SVN::remove_username($repos_root); + $result .= "Repository Root: " . escape_url($repos_root) . "\n"; + }; + if ($@) { + $result .= "Repository Root: (offline)\n"; + } + ::_req_svn(); + $result .= "Repository UUID: $uuid\n" unless $diff_status eq "A" && + ($SVN::Core::VERSION le '1.5.4' || $file_type ne "dir"); + $result .= "Revision: " . ($diff_status eq "A" ? 0 : $rev) . "\n"; + + $result .= "Node Kind: " . + ($file_type eq "dir" ? "directory" : "file") . "\n"; + + my $schedule = $diff_status eq "A" + ? "add" + : ($diff_status eq "D" ? "delete" : "normal"); + $result .= "Schedule: $schedule\n"; + + if ($diff_status eq "A") { + print $result, "\n"; + return; + } + + my ($lc_author, $lc_rev, $lc_date_utc); + my @args = Git::SVN::Log::git_svn_log_cmd($rev, $rev, "--", $fullpath); + my $log = command_output_pipe(@args); + my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/; + while (<$log>) { + if (/^${esc_color}author (.+) <[^>]+> (\d+) ([\-\+]?\d+)$/o) { + $lc_author = $1; + $lc_date_utc = Git::SVN::Log::parse_git_date($2, $3); + } elsif (/^${esc_color} (git-svn-id:.+)$/o) { + (undef, $lc_rev, undef) = ::extract_metadata($1); + } + } + close $log; + + Git::SVN::Log::set_local_timezone(); + + $result .= "Last Changed Author: $lc_author\n"; + $result .= "Last Changed Rev: $lc_rev\n"; + $result .= "Last Changed Date: " . + Git::SVN::Log::format_svn_date($lc_date_utc) . "\n"; + + if ($file_type ne "dir") { + my $text_last_updated_date = + ($diff_status eq "D" ? $lc_date_utc : (stat $path)[9]); + $result .= + "Text Last Updated: " . + Git::SVN::Log::format_svn_date($text_last_updated_date) . + "\n"; + my $checksum; + if ($diff_status eq "D") { + my ($fh, $ctx) = + command_output_pipe(qw(cat-file blob), "HEAD:$path"); + if ($file_type eq "link") { + my $file_name = <$fh>; + $checksum = md5sum("link $file_name"); + } else { + $checksum = md5sum($fh); + } + command_close_pipe($fh, $ctx); + } elsif ($file_type eq "link") { + my $file_name = + command(qw(cat-file blob), "HEAD:$path"); + $checksum = + md5sum("link " . $file_name); + } else { + open FILE, "<", $path or die $!; + $checksum = md5sum(\*FILE); + close FILE or die $!; + } + $result .= "Checksum: " . $checksum . "\n"; + } + + print $result, "\n"; +} + +sub cmd_reset { + my $target = shift || $_revision or die "SVN revision required\n"; + $target = $1 if $target =~ /^r(\d+)$/; + $target =~ /^\d+$/ or die "Numeric SVN revision expected\n"; + my ($url, $rev, $uuid, $gs) = working_head_info('HEAD'); + unless ($gs) { + die "Unable to determine upstream SVN information from ". 
+ "history\n"; + } + my ($r, $c) = $gs->find_rev_before($target, not $_fetch_parent); + die "Cannot find SVN revision $target\n" unless defined($c); + $gs->rev_map_set($r, $c, 'reset', $uuid); + print "r$r = $c ($gs->{ref_id})\n"; +} + +sub cmd_gc { + if (!$can_compress) { + warn "Compress::Zlib could not be found; unhandled.log " . + "files will not be compressed.\n"; + } + find({ wanted => \&gc_directory, no_chdir => 1}, "$ENV{GIT_DIR}/svn"); +} + +########################### utility functions ######################### + +sub rebase_cmd { + my @cmd = qw/rebase/; + push @cmd, '-v' if $_verbose; + push @cmd, qw/--merge/ if $_merge; + push @cmd, "--strategy=$_strategy" if $_strategy; + @cmd; +} + +sub post_fetch_checkout { + return if $_no_checkout; + my $gs = $Git::SVN::_head or return; + return if verify_ref('refs/heads/master^0'); + + # look for "trunk" ref if it exists + my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}}; + my $fetch = $remote->{fetch}; + if ($fetch) { + foreach my $p (keys %$fetch) { + basename($fetch->{$p}) eq 'trunk' or next; + $gs = Git::SVN->new($fetch->{$p}, $gs->{repo_id}, $p); + last; + } + } + + my $valid_head = verify_ref('HEAD^0'); + command_noisy(qw(update-ref refs/heads/master), $gs->refname); + return if ($valid_head || !verify_ref('HEAD^0')); + + return if $ENV{GIT_DIR} !~ m#^(?:.*/)?\.git$#; + my $index = $ENV{GIT_INDEX_FILE} || "$ENV{GIT_DIR}/index"; + return if -f $index; + + return if command_oneline(qw/rev-parse --is-inside-work-tree/) eq 'false'; + return if command_oneline(qw/rev-parse --is-inside-git-dir/) eq 'true'; + command_noisy(qw/read-tree -m -u -v HEAD HEAD/); + print STDERR "Checked out HEAD:\n ", + $gs->full_url, " r", $gs->last_rev, "\n"; + $gs->mkemptydirs($gs->last_rev); +} + +sub complete_svn_url { + my ($url, $path) = @_; + $path =~ s#/+$##; + if ($path !~ m#^[a-z\+]+://#) { + if (!defined $url || $url !~ m#^[a-z\+]+://#) { + fatal("E: '$path' is not a complete URL ", + "and a separate URL is not specified"); + } + return ($url, $path); + } + return ($path, ''); +} + +sub complete_url_ls_init { + my ($ra, $repo_path, $switch, $pfx) = @_; + unless ($repo_path) { + print STDERR "W: $switch not specified\n"; + return; + } + $repo_path =~ s#/+$##; + if ($repo_path =~ m#^[a-z\+]+://#) { + $ra = Git::SVN::Ra->new($repo_path); + $repo_path = ''; + } else { + $repo_path =~ s#^/+##; + unless ($ra) { + fatal("E: '$repo_path' is not a complete URL ", + "and a separate URL is not specified"); + } + } + my $url = $ra->{url}; + my $gs = Git::SVN->init($url, undef, undef, undef, 1); + my $k = "svn-remote.$gs->{repo_id}.url"; + my $orig_url = eval { command_oneline(qw/config --get/, $k) }; + if ($orig_url && ($orig_url ne $gs->{url})) { + die "$k already set: $orig_url\n", + "wanted to set to: $gs->{url}\n"; + } + command_oneline('config', $k, $gs->{url}) unless $orig_url; + my $remote_path = "$gs->{path}/$repo_path"; + $remote_path =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg; + $remote_path =~ s#/+#/#g; + $remote_path =~ s#^/##g; + $remote_path .= "/*" if $remote_path !~ /\*/; + my ($n) = ($switch =~ /^--(\w+)/); + if (length $pfx && $pfx !~ m#/$#) { + die "--prefix='$pfx' must have a trailing slash '/'\n"; + } + command_noisy('config', + '--add', + "svn-remote.$gs->{repo_id}.$n", + "$remote_path:refs/remotes/$pfx*" . 
+ ('/*' x (($remote_path =~ tr/*/*/) - 1)) ); +} + +sub verify_ref { + my ($ref) = @_; + eval { command_oneline([ 'rev-parse', '--verify', $ref ], + { STDERR => 0 }); }; +} + +sub get_tree_from_treeish { + my ($treeish) = @_; + # $treeish can be a symbolic ref, too: + my $type = command_oneline(qw/cat-file -t/, $treeish); + my $expected; + while ($type eq 'tag') { + ($treeish, $type) = command(qw/cat-file tag/, $treeish); + } + if ($type eq 'commit') { + $expected = (grep /^tree /, command(qw/cat-file commit/, + $treeish))[0]; + ($expected) = ($expected =~ /^tree ($sha1)$/o); + die "Unable to get tree from $treeish\n" unless $expected; + } elsif ($type eq 'tree') { + $expected = $treeish; + } else { + die "$treeish is a $type, expected tree, tag or commit\n"; + } + return $expected; +} + +sub get_commit_entry { + my ($treeish) = shift; + my %log_entry = ( log => '', tree => get_tree_from_treeish($treeish) ); + my $commit_editmsg = "$ENV{GIT_DIR}/COMMIT_EDITMSG"; + my $commit_msg = "$ENV{GIT_DIR}/COMMIT_MSG"; + open my $log_fh, '>', $commit_editmsg or croak $!; + + my $type = command_oneline(qw/cat-file -t/, $treeish); + if ($type eq 'commit' || $type eq 'tag') { + my ($msg_fh, $ctx) = command_output_pipe('cat-file', + $type, $treeish); + my $in_msg = 0; + my $author; + my $saw_from = 0; + my $msgbuf = ""; + while (<$msg_fh>) { + if (!$in_msg) { + $in_msg = 1 if (/^\s*$/); + $author = $1 if (/^author (.*>)/); + } elsif (/^git-svn-id: /) { + # skip this for now, we regenerate the + # correct one on re-fetch anyways + # TODO: set *:merge properties or like... + } else { + if (/^From:/ || /^Signed-off-by:/) { + $saw_from = 1; + } + $msgbuf .= $_; + } + } + $msgbuf =~ s/\s+$//s; + if ($Git::SVN::_add_author_from && defined($author) + && !$saw_from) { + $msgbuf .= "\n\nFrom: $author"; + } + print $log_fh $msgbuf or croak $!; + command_close_pipe($msg_fh, $ctx); + } + close $log_fh or croak $!; + + if ($_edit || ($type eq 'tree')) { + chomp(my $editor = command_oneline(qw(var GIT_EDITOR))); + system('sh', '-c', $editor.' 
"$@"', $editor, $commit_editmsg); + } + rename $commit_editmsg, $commit_msg or croak $!; + { + require Encode; + # SVN requires messages to be UTF-8 when entering the repo + local $/; + open $log_fh, '<', $commit_msg or croak $!; + binmode $log_fh; + chomp($log_entry{log} = <$log_fh>); + + my $enc = Git::config('i18n.commitencoding') || 'UTF-8'; + my $msg = $log_entry{log}; + + eval { $msg = Encode::decode($enc, $msg, 1) }; + if ($@) { + die "Could not decode as $enc:\n", $msg, + "\nPerhaps you need to set i18n.commitencoding\n"; + } + + eval { $msg = Encode::encode('UTF-8', $msg, 1) }; + die "Could not encode as UTF-8:\n$msg\n" if $@; + + $log_entry{log} = $msg; + + close $log_fh or croak $!; + } + unlink $commit_msg; + \%log_entry; +} + +sub s_to_file { + my ($str, $file, $mode) = @_; + open my $fd,'>',$file or croak $!; + print $fd $str,"\n" or croak $!; + close $fd or croak $!; + chmod ($mode &~ umask, $file) if (defined $mode); +} + +sub file_to_s { + my $file = shift; + open my $fd,'<',$file or croak "$!: file: $file\n"; + local $/; + my $ret = <$fd>; + close $fd or croak $!; + $ret =~ s/\s*$//s; + return $ret; +} + +# ' = real-name ' mapping based on git-svnimport: +sub load_authors { + open my $authors, '<', $_authors or die "Can't open $_authors $!\n"; + my $log = $cmd eq 'log'; + while (<$authors>) { + chomp; + next unless /^(.+?|\(no author\))\s*=\s*(.+?)\s*<(.+)>\s*$/; + my ($user, $name, $email) = ($1, $2, $3); + if ($log) { + $Git::SVN::Log::rusers{"$name <$email>"} = $user; + } else { + $users{$user} = [$name, $email]; + } + } + close $authors or croak $!; +} + +# convert GetOpt::Long specs for use by git-config +sub read_git_config { + my $opts = shift; + my @config_only; + foreach my $o (keys %$opts) { + # if we have mixedCase and a long option-only, then + # it's a config-only variable that we don't need for + # the command-line. 
+ push @config_only, $o if ($o =~ /[A-Z]/ && $o =~ /^[a-z]+$/i); + my $v = $opts->{$o}; + my ($key) = ($o =~ /^([a-zA-Z\-]+)/); + $key =~ s/-//g; + my $arg = 'git config'; + $arg .= ' --int' if ($o =~ /[:=]i$/); + $arg .= ' --bool' if ($o !~ /[:=][sfi]$/); + if (ref $v eq 'ARRAY') { + chomp(my @tmp = `$arg --get-all svn.$key`); + @$v = @tmp if @tmp; + } else { + chomp(my $tmp = `$arg --get svn.$key`); + if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) { + $$v = $tmp; + } + } + } + delete @$opts{@config_only} if @config_only; +} + +sub extract_metadata { + my $id = shift or return (undef, undef, undef); + my ($url, $rev, $uuid) = ($id =~ /^\s*git-svn-id:\s+(.*)\@(\d+) + \s([a-f\d\-]+)$/ix); + if (!defined $rev || !$uuid || !$url) { + # some of the original repositories I made had + # identifiers like this: + ($rev, $uuid) = ($id =~/^\s*git-svn-id:\s(\d+)\@([a-f\d\-]+)/i); + } + return ($url, $rev, $uuid); +} + +sub cmt_metadata { + return extract_metadata((grep(/^git-svn-id: /, + command(qw/cat-file commit/, shift)))[-1]); +} + +sub cmt_sha2rev_batch { + my %s2r; + my ($pid, $in, $out, $ctx) = command_bidi_pipe(qw/cat-file --batch/); + my $list = shift; + + foreach my $sha (@{$list}) { + my $first = 1; + my $size = 0; + print $out $sha, "\n"; + + while (my $line = <$in>) { + if ($first && $line =~ /^[[:xdigit:]]{40}\smissing$/) { + last; + } elsif ($first && + $line =~ /^[[:xdigit:]]{40}\scommit\s(\d+)$/) { + $first = 0; + $size = $1; + next; + } elsif ($line =~ /^(git-svn-id: )/) { + my (undef, $rev, undef) = + extract_metadata($line); + $s2r{$sha} = $rev; + } + + $size -= length($line); + last if ($size == 0); + } + } + + command_close_bidi_pipe($pid, $in, $out, $ctx); + + return \%s2r; +} + +sub working_head_info { + my ($head, $refs) = @_; + my @args = qw/log --no-color --no-decorate --first-parent + --pretty=medium/; + my ($fh, $ctx) = command_output_pipe(@args, $head); + my $hash; + my %max; + while (<$fh>) { + if ( m{^commit ($::sha1)$} ) { + unshift @$refs, $hash if $hash and $refs; + $hash = $1; + next; + } + next unless s{^\s*(git-svn-id:)}{$1}; + my ($url, $rev, $uuid) = extract_metadata($_); + if (defined $url && defined $rev) { + next if $max{$url} and $max{$url} < $rev; + if (my $gs = Git::SVN->find_by_url($url)) { + my $c = $gs->rev_map_get($rev, $uuid); + if ($c && $c eq $hash) { + close $fh; # break the pipe + return ($url, $rev, $uuid, $gs); + } else { + $max{$url} ||= $gs->rev_map_max; + } + } + } + } + command_close_pipe($fh, $ctx); + (undef, undef, undef, undef); +} + +sub read_commit_parents { + my ($parents, $c) = @_; + chomp(my $p = command_oneline(qw/rev-list --parents -1/, $c)); + $p =~ s/^($c)\s*// or die "rev-list --parents -1 $c failed!\n"; + @{$parents->{$c}} = split(/ /, $p); +} + +sub linearize_history { + my ($gs, $refs) = @_; + my %parents; + foreach my $c (@$refs) { + read_commit_parents(\%parents, $c); + } + + my @linear_refs; + my %skip = (); + my $last_svn_commit = $gs->last_commit; + foreach my $c (reverse @$refs) { + next if $c eq $last_svn_commit; + last if $skip{$c}; + + unshift @linear_refs, $c; + $skip{$c} = 1; + + # we only want the first parent to diff against for linear + # history, we save the rest to inject when we finalize the + # svn commit + my $fp_a = verify_ref("$c~1"); + my $fp_b = shift @{$parents{$c}} if $parents{$c}; + if (!$fp_a || !$fp_b) { + die "Commit $c\n", + "has no parent commit, and therefore ", + "nothing to diff against.\n", + "You should be working from a repository ", + "originally created by git-svn\n"; + } + if 
($fp_a ne $fp_b) { + die "$c~1 = $fp_a, however parsing commit $c ", + "revealed that:\n$c~1 = $fp_b\nBUG!\n"; + } + + foreach my $p (@{$parents{$c}}) { + $skip{$p} = 1; + } + } + (\@linear_refs, \%parents); +} + +sub find_file_type_and_diff_status { + my ($path) = @_; + return ('dir', '') if $path eq ''; + + my $diff_output = + command_oneline(qw(diff --cached --name-status --), $path) || ""; + my $diff_status = (split(' ', $diff_output))[0] || ""; + + my $ls_tree = command_oneline(qw(ls-tree HEAD), $path) || ""; + + return (undef, undef) if !$diff_status && !$ls_tree; + + if ($diff_status eq "A") { + return ("link", $diff_status) if -l $path; + return ("dir", $diff_status) if -d $path; + return ("file", $diff_status); + } + + my $mode = (split(' ', $ls_tree))[0] || ""; + + return ("link", $diff_status) if $mode eq "120000"; + return ("dir", $diff_status) if $mode eq "040000"; + return ("file", $diff_status); +} + +sub md5sum { + my $arg = shift; + my $ref = ref $arg; + my $md5 = Digest::MD5->new(); + if ($ref eq 'GLOB' || $ref eq 'IO::File' || $ref eq 'File::Temp') { + $md5->addfile($arg) or croak $!; + } elsif ($ref eq 'SCALAR') { + $md5->add($$arg) or croak $!; + } elsif (!$ref) { + $md5->add($arg) or croak $!; + } else { + ::fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'"; + } + return $md5->hexdigest(); +} + +sub gc_directory { + if ($can_compress && -f $_ && basename($_) eq "unhandled.log") { + my $out_filename = $_ . ".gz"; + open my $in_fh, "<", $_ or die "Unable to open $_: $!\n"; + binmode $in_fh; + my $gz = Compress::Zlib::gzopen($out_filename, "ab") or + die "Unable to open $out_filename: $!\n"; + + my $res; + while ($res = sysread($in_fh, my $str, 1024)) { + $gz->gzwrite($str) or + die "Unable to write: ".$gz->gzerror()."!\n"; + } + unlink $_ or die "unlink $File::Find::name: $!\n"; + } elsif (-f $_ && basename($_) eq "index") { + unlink $_ or die "unlink $_: $!\n"; + } +} + +package Git::SVN; +use strict; +use warnings; +use Fcntl qw/:DEFAULT :seek/; +use constant rev_map_fmt => 'NH40'; +use vars qw/$default_repo_id $default_ref_id $_no_metadata $_follow_parent + $_repack $_repack_flags $_use_svm_props $_head + $_use_svnsync_props $no_reuse_existing $_minimize_url + $_use_log_author $_add_author_from $_localtime/; +use Carp qw/croak/; +use File::Path qw/mkpath/; +use File::Copy qw/copy/; +use IPC::Open3; +use Memoize; # core since 5.8.0, Jul 2002 +use Memoize::Storable; + +my ($_gc_nr, $_gc_period); + +# properties that we do not log: +my %SKIP_PROP; +BEGIN { + %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url + svn:special svn:executable + svn:entry:committed-rev + svn:entry:last-author + svn:entry:uuid + svn:entry:committed-date/; + + # some options are read globally, but can be overridden locally + # per [svn-remote "..."] section. Command-line options will *NOT* + # override options set in an [svn-remote "..."] section + no strict 'refs'; + for my $option (qw/follow_parent no_metadata use_svm_props + use_svnsync_props/) { + my $key = $option; + $key =~ tr/_//d; + my $prop = "-$option"; + *$option = sub { + my ($self) = @_; + return $self->{$prop} if exists $self->{$prop}; + my $k = "svn-remote.$self->{repo_id}.$key"; + eval { command_oneline(qw/config --get/, $k) }; + if ($@) { + $self->{$prop} = ${"Git::SVN::_$option"}; + } else { + my $v = command_oneline(qw/config --bool/,$k); + $self->{$prop} = $v eq 'false' ? 
0 : 1; + } + return $self->{$prop}; + } + } +} + + +my (%LOCKFILES, %INDEX_FILES); +END { + unlink keys %LOCKFILES if %LOCKFILES; + unlink keys %INDEX_FILES if %INDEX_FILES; +} + +sub resolve_local_globs { + my ($url, $fetch, $glob_spec) = @_; + return unless defined $glob_spec; + my $ref = $glob_spec->{ref}; + my $path = $glob_spec->{path}; + foreach (command(qw#for-each-ref --format=%(refname) refs/#)) { + next unless m#^$ref->{regex}$#; + my $p = $1; + my $pathname = desanitize_refname($path->full_path($p)); + my $refname = desanitize_refname($ref->full_path($p)); + if (my $existing = $fetch->{$pathname}) { + if ($existing ne $refname) { + die "Refspec conflict:\n", + "existing: $existing\n", + " globbed: $refname\n"; + } + my $u = (::cmt_metadata("$refname"))[0]; + $u =~ s!^\Q$url\E(/|$)!! or die + "$refname: '$url' not found in '$u'\n"; + if ($pathname ne $u) { + warn "W: Refspec glob conflict ", + "(ref: $refname):\n", + "expected path: $pathname\n", + " real path: $u\n", + "Continuing ahead with $u\n"; + next; + } + } else { + $fetch->{$pathname} = $refname; + } + } +} + +sub parse_revision_argument { + my ($base, $head) = @_; + if (!defined $::_revision || $::_revision eq 'BASE:HEAD') { + return ($base, $head); + } + return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/); + return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/); + return ($head, $head) if ($::_revision eq 'HEAD'); + return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/); + return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/); + die "revision argument: $::_revision not understood by git-svn\n"; +} + +sub fetch_all { + my ($repo_id, $remotes) = @_; + if (ref $repo_id) { + my $gs = $repo_id; + $repo_id = undef; + $repo_id = $gs->{repo_id}; + } + $remotes ||= read_all_remotes(); + my $remote = $remotes->{$repo_id} or + die "[svn-remote \"$repo_id\"] unknown\n"; + my $fetch = $remote->{fetch}; + my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n"; + my (@gs, @globs); + my $ra = Git::SVN::Ra->new($url); + my $uuid = $ra->get_uuid; + my $head = $ra->get_latest_revnum; + + # ignore errors, $head revision may not even exist anymore + eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) }; + warn "W: $@\n" if $@; + + my $base = defined $fetch ? $head : 0; + + # read the max revs for wildcard expansion (branches/*, tags/*) + foreach my $t (qw/branches tags/) { + defined $remote->{$t} or next; + push @globs, @{$remote->{$t}}; + + my $max_rev = eval { tmp_config(qw/--int --get/, + "svn-remote.$repo_id.${t}-maxRev") }; + if (defined $max_rev && ($max_rev < $base)) { + $base = $max_rev; + } elsif (!defined $max_rev) { + $base = 0; + } + } + + if ($fetch) { + foreach my $p (sort keys %$fetch) { + my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p); + my $lr = $gs->rev_map_max; + if (defined $lr) { + $base = $lr if ($lr < $base); + } + push @gs, $gs; + } + } + + ($base, $head) = parse_revision_argument($base, $head); + $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs); +} + +sub read_all_remotes { + my $r = {}; + my $use_svm_props = eval { command_oneline(qw/config --bool + svn.useSvmProps/) }; + $use_svm_props = $use_svm_props eq 'true' if $use_svm_props; + my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*}; + foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) { + if (m!^(.+)\.fetch=$svn_refspec$!) { + my ($remote, $local_ref, $remote_ref) = ($1, $2, $3); + die("svn-remote.$remote: remote ref '$remote_ref' " + . 
"must start with 'refs/'\n") + unless $remote_ref =~ m{^refs/}; + $local_ref = uri_decode($local_ref); + $r->{$remote}->{fetch}->{$local_ref} = $remote_ref; + $r->{$remote}->{svm} = {} if $use_svm_props; + } elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) { + $r->{$1}->{svm} = {}; + } elsif (m!^(.+)\.url=\s*(.*)\s*$!) { + $r->{$1}->{url} = $2; + } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) { + my ($remote, $t, $local_ref, $remote_ref) = + ($1, $2, $3, $4); + die("svn-remote.$remote: remote ref '$remote_ref' ($t) " + . "must start with 'refs/'\n") + unless $remote_ref =~ m{^refs/}; + $local_ref = uri_decode($local_ref); + my $rs = { + t => $t, + remote => $remote, + path => Git::SVN::GlobSpec->new($local_ref, 1), + ref => Git::SVN::GlobSpec->new($remote_ref, 0) }; + if (length($rs->{ref}->{right}) != 0) { + die "The '*' glob character must be the last ", + "character of '$remote_ref'\n"; + } + push @{ $r->{$remote}->{$t} }, $rs; + } + } + + map { + if (defined $r->{$_}->{svm}) { + my $svm; + eval { + my $section = "svn-remote.$_"; + $svm = { + source => tmp_config('--get', + "$section.svm-source"), + replace => tmp_config('--get', + "$section.svm-replace"), + } + }; + $r->{$_}->{svm} = $svm; + } + } keys %$r; + + $r; +} + +sub init_vars { + $_gc_nr = $_gc_period = 1000; + if (defined $_repack || defined $_repack_flags) { + warn "Repack options are obsolete; they have no effect.\n"; + } +} + +sub verify_remotes_sanity { + return unless -d $ENV{GIT_DIR}; + my %seen; + foreach (command(qw/config -l/)) { + if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) { + if ($seen{$1}) { + die "Remote ref refs/remote/$1 is tracked by", + "\n \"$_\"\nand\n \"$seen{$1}\"\n", + "Please resolve this ambiguity in ", + "your git configuration file before ", + "continuing\n"; + } + $seen{$1} = $_; + } + } +} + +sub find_existing_remote { + my ($url, $remotes) = @_; + return undef if $no_reuse_existing; + my $existing; + foreach my $repo_id (keys %$remotes) { + my $u = $remotes->{$repo_id}->{url} or next; + next if $u ne $url; + $existing = $repo_id; + last; + } + $existing; +} + +sub init_remote_config { + my ($self, $url, $no_write) = @_; + $url =~ s!/+$!!; # strip trailing slash + my $r = read_all_remotes(); + my $existing = find_existing_remote($url, $r); + if ($existing) { + unless ($no_write) { + print STDERR "Using existing ", + "[svn-remote \"$existing\"]\n"; + } + $self->{repo_id} = $existing; + } elsif ($_minimize_url) { + my $min_url = Git::SVN::Ra->new($url)->minimize_url; + $existing = find_existing_remote($min_url, $r); + if ($existing) { + unless ($no_write) { + print STDERR "Using existing ", + "[svn-remote \"$existing\"]\n"; + } + $self->{repo_id} = $existing; + } + if ($min_url ne $url) { + unless ($no_write) { + print STDERR "Using higher level of URL: ", + "$url => $min_url\n"; + } + my $old_path = $self->{path}; + $self->{path} = $url; + $self->{path} =~ s!^\Q$min_url\E(/|$)!!; + if (length $old_path) { + $self->{path} .= "/$old_path"; + } + $url = $min_url; + } + } + my $orig_url; + if (!$existing) { + # verify that we aren't overwriting anything: + $orig_url = eval { + command_oneline('config', '--get', + "svn-remote.$self->{repo_id}.url") + }; + if ($orig_url && ($orig_url ne $url)) { + die "svn-remote.$self->{repo_id}.url already set: ", + "$orig_url\nwanted to set to: $url\n"; + } + } + my ($xrepo_id, $xpath) = find_ref($self->refname); + if (!$no_write && defined $xpath) { + die "svn-remote.$xrepo_id.fetch already set to track ", + "$xpath:", $self->refname, "\n"; + } + unless 
($no_write) { + command_noisy('config', + "svn-remote.$self->{repo_id}.url", $url); + $self->{path} =~ s{^/}{}; + $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg; + command_noisy('config', '--add', + "svn-remote.$self->{repo_id}.fetch", + "$self->{path}:".$self->refname); + } + $self->{url} = $url; +} + +sub find_by_url { # repos_root and, path are optional + my ($class, $full_url, $repos_root, $path) = @_; + + return undef unless defined $full_url; + remove_username($full_url); + remove_username($repos_root) if defined $repos_root; + my $remotes = read_all_remotes(); + if (defined $full_url && defined $repos_root && !defined $path) { + $path = $full_url; + $path =~ s#^\Q$repos_root\E(?:/|$)##; + } + foreach my $repo_id (keys %$remotes) { + my $u = $remotes->{$repo_id}->{url} or next; + remove_username($u); + next if defined $repos_root && $repos_root ne $u; + + my $fetch = $remotes->{$repo_id}->{fetch} || {}; + foreach my $t (qw/branches tags/) { + foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) { + resolve_local_globs($u, $fetch, $globspec); + } + } + my $p = $path; + my $rwr = rewrite_root({repo_id => $repo_id}); + my $svm = $remotes->{$repo_id}->{svm} + if defined $remotes->{$repo_id}->{svm}; + unless (defined $p) { + $p = $full_url; + my $z = $u; + my $prefix = ''; + if ($rwr) { + $z = $rwr; + remove_username($z); + } elsif (defined $svm) { + $z = $svm->{source}; + $prefix = $svm->{replace}; + $prefix =~ s#^\Q$u\E(?:/|$)##; + $prefix =~ s#/$##; + } + $p =~ s#^\Q$z\E(?:/|$)#$prefix# or next; + } + foreach my $f (keys %$fetch) { + next if $f ne $p; + return Git::SVN->new($fetch->{$f}, $repo_id, $f); + } + } + undef; +} + +sub init { + my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_; + my $self = _new($class, $repo_id, $ref_id, $path); + if (defined $url) { + $self->init_remote_config($url, $no_write); + } + $self; +} + +sub find_ref { + my ($ref_id) = @_; + foreach (command(qw/config -l/)) { + next unless m!^svn-remote\.(.+)\.fetch= + \s*(.*?)\s*:\s*(.+?)\s*$!x; + my ($repo_id, $path, $ref) = ($1, $2, $3); + if ($ref eq $ref_id) { + $path = '' if ($path =~ m#^\./?#); + return ($repo_id, $path); + } + } + (undef, undef, undef); +} + +sub new { + my ($class, $ref_id, $repo_id, $path) = @_; + if (defined $ref_id && !defined $repo_id && !defined $path) { + ($repo_id, $path) = find_ref($ref_id); + if (!defined $repo_id) { + die "Could not find a \"svn-remote.*.fetch\" key ", + "in the repository configuration matching: ", + "$ref_id\n"; + } + } + my $self = _new($class, $repo_id, $ref_id, $path); + if (!defined $self->{path} || !length $self->{path}) { + my $fetch = command_oneline('config', '--get', + "svn-remote.$repo_id.fetch", + ":$ref_id\$") or + die "Failed to read \"svn-remote.$repo_id.fetch\" ", + "\":$ref_id\$\" in config\n"; + ($self->{path}, undef) = split(/\s*:\s*/, $fetch); + } + $self->{path} =~ s{/+}{/}g; + $self->{path} =~ s{\A/}{}; + $self->{path} =~ s{/\z}{}; + $self->{url} = command_oneline('config', '--get', + "svn-remote.$repo_id.url") or + die "Failed to read \"svn-remote.$repo_id.url\" in config\n"; + $self->rebuild; + $self; +} + +sub refname { + my ($refname) = $_[0]->{ref_id} ; + + # It cannot end with a slash /, we'll throw up on this because + # SVN can't have directories with a slash in their name, either: + if ($refname =~ m{/$}) { + die "ref: '$refname' ends with a trailing slash, this is ", + "not permitted by git nor Subversion\n"; + } + + # It cannot have ASCII control character space, tilde ~, caret ^, + # colon :, question-mark ?, 
asterisk *, space, or open bracket [ + # anywhere. + # + # Additionally, % must be escaped because it is used for escaping + # and we want our escaped refname to be reversible + $refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg; + + # no slash-separated component can begin with a dot . + # /.* becomes /%2E* + $refname =~ s{/\.}{/%2E}g; + + # It cannot have two consecutive dots .. anywhere + # .. becomes %2E%2E + $refname =~ s{\.\.}{%2E%2E}g; + + # trailing dots and .lock are not allowed + # .$ becomes %2E and .lock becomes %2Elock + $refname =~ s{\.(?=$|lock$)}{%2E}; + + # the sequence @{ is used to access the reflog + # @{ becomes %40{ + $refname =~ s{\@\{}{%40\{}g; + + return $refname; +} + +sub desanitize_refname { + my ($refname) = @_; + $refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg; + return $refname; +} + +sub svm_uuid { + my ($self) = @_; + return $self->{svm}->{uuid} if $self->svm; + $self->ra; + unless ($self->{svm}) { + die "SVM UUID not cached, and reading remotely failed\n"; + } + $self->{svm}->{uuid}; +} + +sub svm { + my ($self) = @_; + return $self->{svm} if $self->{svm}; + my $svm; + # see if we have it in our config, first: + eval { + my $section = "svn-remote.$self->{repo_id}"; + $svm = { + source => tmp_config('--get', "$section.svm-source"), + uuid => tmp_config('--get', "$section.svm-uuid"), + replace => tmp_config('--get', "$section.svm-replace"), + } + }; + if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) { + $self->{svm} = $svm; + } + $self->{svm}; +} + +sub _set_svm_vars { + my ($self, $ra) = @_; + return $ra if $self->svm; + + my @err = ( "useSvmProps set, but failed to read SVM properties\n", + "(svm:source, svm:uuid) ", + "from the following URLs:\n" ); + sub read_svm_props { + my ($self, $ra, $path, $r) = @_; + my $props = ($ra->get_dir($path, $r))[2]; + my $src = $props->{'svm:source'}; + my $uuid = $props->{'svm:uuid'}; + return undef if (!$src || !$uuid); + + chomp($src, $uuid); + + $uuid =~ m{^[0-9a-f\-]{30,}$}i + or die "doesn't look right - svm:uuid is '$uuid'\n"; + + # the '!' 
is used to mark the repos_root!/relative/path + $src =~ s{/?!/?}{/}; + $src =~ s{/+$}{}; # no trailing slashes please + # username is of no interest + $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1}; + + my $replace = $ra->{url}; + $replace .= "/$path" if length $path; + + my $section = "svn-remote.$self->{repo_id}"; + tmp_config("$section.svm-source", $src); + tmp_config("$section.svm-replace", $replace); + tmp_config("$section.svm-uuid", $uuid); + $self->{svm} = { + source => $src, + uuid => $uuid, + replace => $replace + }; + } + + my $r = $ra->get_latest_revnum; + my $path = $self->{path}; + my %tried; + while (length $path) { + unless ($tried{"$self->{url}/$path"}) { + return $ra if $self->read_svm_props($ra, $path, $r); + $tried{"$self->{url}/$path"} = 1; + } + $path =~ s#/?[^/]+$##; + } + die "Path: '$path' should be ''\n" if $path ne ''; + return $ra if $self->read_svm_props($ra, $path, $r); + $tried{"$self->{url}/$path"} = 1; + + if ($ra->{repos_root} eq $self->{url}) { + die @err, (map { " $_\n" } keys %tried), "\n"; + } + + # nope, make sure we're connected to the repository root: + my $ok; + my @tried_b; + $path = $ra->{svn_path}; + $ra = Git::SVN::Ra->new($ra->{repos_root}); + while (length $path) { + unless ($tried{"$ra->{url}/$path"}) { + $ok = $self->read_svm_props($ra, $path, $r); + last if $ok; + $tried{"$ra->{url}/$path"} = 1; + } + $path =~ s#/?[^/]+$##; + } + die "Path: '$path' should be ''\n" if $path ne ''; + $ok ||= $self->read_svm_props($ra, $path, $r); + $tried{"$ra->{url}/$path"} = 1; + if (!$ok) { + die @err, (map { " $_\n" } keys %tried), "\n"; + } + Git::SVN::Ra->new($self->{url}); +} + +sub svnsync { + my ($self) = @_; + return $self->{svnsync} if $self->{svnsync}; + + if ($self->no_metadata) { + die "Can't have both 'noMetadata' and ", + "'useSvnsyncProps' options set!\n"; + } + if ($self->rewrite_root) { + die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ", + "options set!\n"; + } + if ($self->rewrite_uuid) { + die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ", + "options set!\n"; + } + + my $svnsync; + # see if we have it in our config, first: + eval { + my $section = "svn-remote.$self->{repo_id}"; + + my $url = tmp_config('--get', "$section.svnsync-url"); + ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or + die "doesn't look right - svn:sync-from-url is '$url'\n"; + + my $uuid = tmp_config('--get', "$section.svnsync-uuid"); + ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or + die "doesn't look right - svn:sync-from-uuid is '$uuid'\n"; + + $svnsync = { url => $url, uuid => $uuid } + }; + if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) { + return $self->{svnsync} = $svnsync; + } + + my $err = "useSvnsyncProps set, but failed to read " . + "svnsync property: svn:sync-from-"; + my $rp = $self->ra->rev_proplist(0); + + my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n"; + ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or + die "doesn't look right - svn:sync-from-url is '$url'\n"; + + my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n"; + ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or + die "doesn't look right - svn:sync-from-uuid is '$uuid'\n"; + + my $section = "svn-remote.$self->{repo_id}"; + tmp_config('--add', "$section.svnsync-uuid", $uuid); + tmp_config('--add', "$section.svnsync-url", $url); + return $self->{svnsync} = { url => $url, uuid => $uuid }; +} + +# this allows us to memoize our SVN::Ra UUID locally and avoid a +# remote lookup (useful for 'git svn log'). 
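+#
+# Illustrative sketch (repository id and UUID below are made up): after the
+# first lookup, tmp_config() stores the value in the private
+# $GIT_DIR/svn/.metadata file, roughly as
+#
+#   [svn-remote "svn"]
+#       uuid = 12345678-9abc-def0-1234-56789abcdef0
+#
+# so later runs can read it back without contacting the server.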
+sub ra_uuid { + my ($self) = @_; + unless ($self->{ra_uuid}) { + my $key = "svn-remote.$self->{repo_id}.uuid"; + my $uuid = eval { tmp_config('--get', $key) }; + if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) { + $self->{ra_uuid} = $uuid; + } else { + die "ra_uuid called without URL\n" unless $self->{url}; + $self->{ra_uuid} = $self->ra->get_uuid; + tmp_config('--add', $key, $self->{ra_uuid}); + } + } + $self->{ra_uuid}; +} + +sub _set_repos_root { + my ($self, $repos_root) = @_; + my $k = "svn-remote.$self->{repo_id}.reposRoot"; + $repos_root ||= $self->ra->{repos_root}; + tmp_config($k, $repos_root); + $repos_root; +} + +sub repos_root { + my ($self) = @_; + my $k = "svn-remote.$self->{repo_id}.reposRoot"; + eval { tmp_config('--get', $k) } || $self->_set_repos_root; +} + +sub ra { + my ($self) = shift; + my $ra = Git::SVN::Ra->new($self->{url}); + $self->_set_repos_root($ra->{repos_root}); + if ($self->use_svm_props && !$self->{svm}) { + if ($self->no_metadata) { + die "Can't have both 'noMetadata' and ", + "'useSvmProps' options set!\n"; + } elsif ($self->use_svnsync_props) { + die "Can't have both 'useSvnsyncProps' and ", + "'useSvmProps' options set!\n"; + } + $ra = $self->_set_svm_vars($ra); + $self->{-want_revprops} = 1; + } + $ra; +} + +# prop_walk(PATH, REV, SUB) +# ------------------------- +# Recursively traverse PATH at revision REV and invoke SUB for each +# directory that contains a SVN property. SUB will be invoked as +# follows: &SUB(gs, path, props); where `gs' is this instance of +# Git::SVN, `path' the path to the directory where the properties +# `props' were found. The `path' will be relative to point of checkout, +# that is, if url://repo/trunk is the current Git branch, and that +# directory contains a sub-directory `d', SUB will be invoked with `/d/' +# as `path' (note the trailing `/'). +sub prop_walk { + my ($self, $path, $rev, $sub) = @_; + + $path =~ s#^/##; + my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev); + $path =~ s#^/*#/#g; + my $p = $path; + # Strip the irrelevant part of the path. + $p =~ s#^/+\Q$self->{path}\E(/|$)#/#; + # Ensure the path is terminated by a `/'. + $p =~ s#/*$#/#; + + # The properties contain all the internal SVN stuff nobody + # (usually) cares about. + my $interesting_props = 0; + foreach (keys %{$props}) { + # If it doesn't start with `svn:', it must be a + # user-defined property. + ++$interesting_props and next if $_ !~ /^svn:/; + # FIXME: Fragile, if SVN adds new public properties, + # this needs to be updated. + ++$interesting_props if /^svn:(?:ignore|keywords|executable + |eol-style|mime-type + |externals|needs-lock)$/x; + } + &$sub($self, $p, $props) if $interesting_props; + + foreach (sort keys %$dirent) { + next if $dirent->{$_}->{kind} != $SVN::Node::dir; + $self->prop_walk($self->{path} . $p . 
$_, $rev, $sub); + } +} + +sub last_rev { ($_[0]->last_rev_commit)[0] } +sub last_commit { ($_[0]->last_rev_commit)[1] } + +# returns the newest SVN revision number and newest commit SHA1 +sub last_rev_commit { + my ($self) = @_; + if (defined $self->{last_rev} && defined $self->{last_commit}) { + return ($self->{last_rev}, $self->{last_commit}); + } + my $c = ::verify_ref($self->refname.'^0'); + if ($c && !$self->use_svm_props && !$self->no_metadata) { + my $rev = (::cmt_metadata($c))[1]; + if (defined $rev) { + ($self->{last_rev}, $self->{last_commit}) = ($rev, $c); + return ($rev, $c); + } + } + my $map_path = $self->map_path; + unless (-e $map_path) { + ($self->{last_rev}, $self->{last_commit}) = (undef, undef); + return (undef, undef); + } + my ($rev, $commit) = $self->rev_map_max(1); + ($self->{last_rev}, $self->{last_commit}) = ($rev, $commit); + return ($rev, $commit); +} + +sub get_fetch_range { + my ($self, $min, $max) = @_; + $max ||= $self->ra->get_latest_revnum; + $min ||= $self->rev_map_max; + (++$min, $max); +} + +sub tmp_config { + my (@args) = @_; + my $old_def_config = "$ENV{GIT_DIR}/svn/config"; + my $config = "$ENV{GIT_DIR}/svn/.metadata"; + if (! -f $config && -f $old_def_config) { + rename $old_def_config, $config or + die "Failed rename $old_def_config => $config: $!\n"; + } + my $old_config = $ENV{GIT_CONFIG}; + $ENV{GIT_CONFIG} = $config; + $@ = undef; + my @ret = eval { + unless (-f $config) { + mkfile($config); + open my $fh, '>', $config or + die "Can't open $config: $!\n"; + print $fh "; This file is used internally by ", + "git-svn\n" or die + "Couldn't write to $config: $!\n"; + print $fh "; You should not have to edit it\n" or + die "Couldn't write to $config: $!\n"; + close $fh or die "Couldn't close $config: $!\n"; + } + command('config', @args); + }; + my $err = $@; + if (defined $old_config) { + $ENV{GIT_CONFIG} = $old_config; + } else { + delete $ENV{GIT_CONFIG}; + } + die $err if $err; + wantarray ? @ret : $ret[0]; +} + +sub tmp_index_do { + my ($self, $sub) = @_; + my $old_index = $ENV{GIT_INDEX_FILE}; + $ENV{GIT_INDEX_FILE} = $self->{index}; + $@ = undef; + my @ret = eval { + my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#); + mkpath([$dir]) unless -d $dir; + &$sub; + }; + my $err = $@; + if (defined $old_index) { + $ENV{GIT_INDEX_FILE} = $old_index; + } else { + delete $ENV{GIT_INDEX_FILE}; + } + die $err if $err; + wantarray ? 
@ret : $ret[0]; +} + +sub assert_index_clean { + my ($self, $treeish) = @_; + + $self->tmp_index_do(sub { + command_noisy('read-tree', $treeish) unless -e $self->{index}; + my $x = command_oneline('write-tree'); + my ($y) = (command(qw/cat-file commit/, $treeish) =~ + /^tree ($::sha1)/mo); + return if $y eq $x; + + warn "Index mismatch: $y != $x\nrereading $treeish\n"; + unlink $self->{index} or die "unlink $self->{index}: $!\n"; + command_noisy('read-tree', $treeish); + $x = command_oneline('write-tree'); + if ($y ne $x) { + ::fatal "trees ($treeish) $y != $x\n", + "Something is seriously wrong..."; + } + }); +} + +sub get_commit_parents { + my ($self, $log_entry) = @_; + my (%seen, @ret, @tmp); + # legacy support for 'set-tree'; this is only used by set_tree_cb: + if (my $ip = $self->{inject_parents}) { + if (my $commit = delete $ip->{$log_entry->{revision}}) { + push @tmp, $commit; + } + } + if (my $cur = ::verify_ref($self->refname.'^0')) { + push @tmp, $cur; + } + if (my $ipd = $self->{inject_parents_dcommit}) { + if (my $commit = delete $ipd->{$log_entry->{revision}}) { + push @tmp, @$commit; + } + } + push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp); + while (my $p = shift @tmp) { + next if $seen{$p}; + $seen{$p} = 1; + push @ret, $p; + } + @ret; +} + +sub rewrite_root { + my ($self) = @_; + return $self->{-rewrite_root} if exists $self->{-rewrite_root}; + my $k = "svn-remote.$self->{repo_id}.rewriteRoot"; + my $rwr = eval { command_oneline(qw/config --get/, $k) }; + if ($rwr) { + $rwr =~ s#/+$##; + if ($rwr !~ m#^[a-z\+]+://#) { + die "$rwr is not a valid URL (key: $k)\n"; + } + } + $self->{-rewrite_root} = $rwr; +} + +sub rewrite_uuid { + my ($self) = @_; + return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid}; + my $k = "svn-remote.$self->{repo_id}.rewriteUUID"; + my $rwid = eval { command_oneline(qw/config --get/, $k) }; + if ($rwid) { + $rwid =~ s#/+$##; + if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) { + die "$rwid is not a valid UUID (key: $k)\n"; + } + } + $self->{-rewrite_uuid} = $rwid; +} + +sub metadata_url { + my ($self) = @_; + ($self->rewrite_root || $self->{url}) . + (length $self->{path} ? '/' . $self->{path} : ''); +} + +sub full_url { + my ($self) = @_; + $self->{url} . (length $self->{path} ? '/' . $self->{path} : ''); +} + + +sub set_commit_header_env { + my ($log_entry) = @_; + my %env; + foreach my $ned (qw/NAME EMAIL DATE/) { + foreach my $ac (qw/AUTHOR COMMITTER/) { + $env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"}; + } + } + + $ENV{GIT_AUTHOR_NAME} = $log_entry->{name}; + $ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email}; + $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date}; + + $ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name}) + ? $log_entry->{commit_name} + : $log_entry->{name}; + $ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email}) + ? 
$log_entry->{commit_email} + : $log_entry->{email}; + \%env; +} + +sub restore_commit_header_env { + my ($env) = @_; + foreach my $ned (qw/NAME EMAIL DATE/) { + foreach my $ac (qw/AUTHOR COMMITTER/) { + my $k = "GIT_${ac}_${ned}"; + if (defined $env->{$k}) { + $ENV{$k} = $env->{$k}; + } else { + delete $ENV{$k}; + } + } + } +} + +sub gc { + command_noisy('gc', '--auto'); +}; + +sub do_git_commit { + my ($self, $log_entry) = @_; + my $lr = $self->last_rev; + if (defined $lr && $lr >= $log_entry->{revision}) { + die "Last fetched revision of ", $self->refname, + " was r$lr, but we are about to fetch: ", + "r$log_entry->{revision}!\n"; + } + if (my $c = $self->rev_map_get($log_entry->{revision})) { + croak "$log_entry->{revision} = $c already exists! ", + "Why are we refetching it?\n"; + } + my $old_env = set_commit_header_env($log_entry); + my $tree = $log_entry->{tree}; + if (!defined $tree) { + $tree = $self->tmp_index_do(sub { + command_oneline('write-tree') }); + } + die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o; + + my @exec = ('git', 'commit-tree', $tree); + foreach ($self->get_commit_parents($log_entry)) { + push @exec, '-p', $_; + } + defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec)) + or croak $!; + binmode $msg_fh; + + # we always get UTF-8 from SVN, but we may want our commits in + # a different encoding. + if (my $enc = Git::config('i18n.commitencoding')) { + require Encode; + Encode::from_to($log_entry->{log}, 'UTF-8', $enc); + } + print $msg_fh $log_entry->{log} or croak $!; + restore_commit_header_env($old_env); + unless ($self->no_metadata) { + print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n" + or croak $!; + } + $msg_fh->flush == 0 or croak $!; + close $msg_fh or croak $!; + chomp(my $commit = do { local $/; <$out_fh> }); + close $out_fh or croak $!; + waitpid $pid, 0; + croak $? if $?; + if ($commit !~ /^$::sha1$/o) { + die "Failed to commit, invalid sha1: $commit\n"; + } + + $self->rev_map_set($log_entry->{revision}, $commit, 1); + + $self->{last_rev} = $log_entry->{revision}; + $self->{last_commit} = $commit; + print "r$log_entry->{revision}" unless $::_q > 1; + if (defined $log_entry->{svm_revision}) { + print " (\@$log_entry->{svm_revision})" unless $::_q > 1; + $self->rev_map_set($log_entry->{svm_revision}, $commit, + 0, $self->svm_uuid); + } + print " = $commit ($self->{ref_id})\n" unless $::_q > 1; + if (--$_gc_nr == 0) { + $_gc_nr = $_gc_period; + gc(); + } + return $commit; +} + +sub match_paths { + my ($self, $paths, $r) = @_; + return 1 if $self->{path} eq ''; + if (my $path = $paths->{"/$self->{path}"}) { + return ($path->{action} eq 'D') ? 
0 : 1; + } + $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//; + if (grep /$self->{path_regex}/, keys %$paths) { + return 1; + } + my $c = ''; + foreach (split m#/#, $self->{path}) { + $c .= "/$_"; + next unless ($paths->{$c} && + ($paths->{$c}->{action} =~ /^[AR]$/)); + if ($self->ra->check_path($self->{path}, $r) == + $SVN::Node::dir) { + return 1; + } + } + return 0; +} + +sub find_parent_branch { + my ($self, $paths, $rev) = @_; + return undef unless $self->follow_parent; + unless (defined $paths) { + my $err_handler = $SVN::Error::handler; + $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs; + $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1, + sub { $paths = $_[0] }); + $SVN::Error::handler = $err_handler; + } + return undef unless defined $paths; + + # look for a parent from another branch: + my @b_path_components = split m#/#, $self->{path}; + my @a_path_components; + my $i; + while (@b_path_components) { + $i = $paths->{'/'.join('/', @b_path_components)}; + last if $i && defined $i->{copyfrom_path}; + unshift(@a_path_components, pop(@b_path_components)); + } + return undef unless defined $i && defined $i->{copyfrom_path}; + my $branch_from = $i->{copyfrom_path}; + if (@a_path_components) { + print STDERR "branch_from: $branch_from => "; + $branch_from .= '/'.join('/', @a_path_components); + print STDERR $branch_from, "\n"; + } + my $r = $i->{copyfrom_rev}; + my $repos_root = $self->ra->{repos_root}; + my $url = $self->ra->{url}; + my $new_url = $url . $branch_from; + print STDERR "Found possible branch point: ", + "$new_url => ", $self->full_url, ", $r\n" + unless $::_q > 1; + $branch_from =~ s#^/##; + my $gs = $self->other_gs($new_url, $url, + $branch_from, $r, $self->{ref_id}); + my ($r0, $parent) = $gs->find_rev_before($r, 1); + { + my ($base, $head); + if (!defined $r0 || !defined $parent) { + ($base, $head) = parse_revision_argument(0, $r); + } else { + if ($r0 < $r) { + $gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1, + 0, 1, sub { $base = $_[1] - 1 }); + } + } + if (defined $base && $base <= $r) { + $gs->fetch($base, $r); + } + ($r0, $parent) = $gs->find_rev_before($r, 1); + } + if (defined $r0 && defined $parent) { + print STDERR "Found branch parent: ($self->{ref_id}) $parent\n" + unless $::_q > 1; + my $ed; + if ($self->ra->can_do_switch) { + $self->assert_index_clean($parent); + print STDERR "Following parent with do_switch\n" + unless $::_q > 1; + # do_switch works with svn/trunk >= r22312, but that + # is not included with SVN 1.4.3 (the latest version + # at the moment), so we can't rely on it + $self->{last_rev} = $r0; + $self->{last_commit} = $parent; + $ed = SVN::Git::Fetcher->new($self, $gs->{path}); + $gs->ra->gs_do_switch($r0, $rev, $gs, + $self->full_url, $ed) + or die "SVN connection failed somewhere...\n"; + } elsif ($self->ra->trees_match($new_url, $r0, + $self->full_url, $rev)) { + print STDERR "Trees match:\n", + " $new_url\@$r0\n", + " ${\$self->full_url}\@$rev\n", + "Following parent with no changes\n" + unless $::_q > 1; + $self->tmp_index_do(sub { + command_noisy('read-tree', $parent); + }); + $self->{last_commit} = $parent; + } else { + print STDERR "Following parent with do_update\n" + unless $::_q > 1; + $ed = SVN::Git::Fetcher->new($self); + $self->ra->gs_do_update($rev, $rev, $self, $ed) + or die "SVN connection failed somewhere...\n"; + } + print STDERR "Successfully followed parent\n" unless $::_q > 1; + return $self->make_log_entry($rev, [$parent], $ed); + } + return undef; +} + +sub do_fetch { + my ($self, $paths, $rev) = @_; + 
my $ed;
+	my ($last_rev, @parents);
+	if (my $lc = $self->last_commit) {
+		# we can have a branch that was deleted, then re-added
+		# under the same name but copied from another path, in
+		# which case we'll have multiple parents (we don't
+		# want to break the original ref, nor lose copypath info):
+		if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+			push @{$log_entry->{parents}}, $lc;
+			return $log_entry;
+		}
+		$ed = SVN::Git::Fetcher->new($self);
+		$last_rev = $self->{last_rev};
+		$ed->{c} = $lc;
+		@parents = ($lc);
+	} else {
+		$last_rev = $rev;
+		if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+			return $log_entry;
+		}
+		$ed = SVN::Git::Fetcher->new($self);
+	}
+	unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
+		die "SVN connection failed somewhere...\n";
+	}
+	$self->make_log_entry($rev, \@parents, $ed);
+}
+
+sub mkemptydirs {
+	my ($self, $r) = @_;
+
+	sub scan {
+		my ($r, $empty_dirs, $line) = @_;
+		if (defined $r && $line =~ /^r(\d+)$/) {
+			return 0 if $1 > $r;
+		} elsif ($line =~ /^ \+empty_dir: (.+)$/) {
+			$empty_dirs->{$1} = 1;
+		} elsif ($line =~ /^ \-empty_dir: (.+)$/) {
+			my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
+			delete @$empty_dirs{@d};
+		}
+		1; # continue
+	};
+
+	my %empty_dirs = ();
+	my $gz_file = "$self->{dir}/unhandled.log.gz";
+	if (-f $gz_file) {
+		if (!$can_compress) {
+			warn "Compress::Zlib could not be found; ",
+			     "empty directories in $gz_file will not be read\n";
+		} else {
+			my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
+				die "Unable to open $gz_file: $!\n";
+			my $line;
+			while ($gz->gzreadline($line) > 0) {
+				scan($r, \%empty_dirs, $line) or last;
+			}
+			$gz->gzclose;
+		}
+	}
+
+	if (open my $fh, '<', "$self->{dir}/unhandled.log") {
+		binmode $fh or croak "binmode: $!";
+		while (<$fh>) {
+			scan($r, \%empty_dirs, $_) or last;
+		}
+		close $fh;
+	}
+
+	my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
+	foreach my $d (sort keys %empty_dirs) {
+		$d = uri_decode($d);
+		$d =~ s/$strip//;
+		next unless length($d);
+		next if -d $d;
+		if (-e $d) {
+			warn "$d exists but is not a directory\n";
+		} else {
+			print "creating empty directory: $d\n";
+			mkpath([$d]);
+		}
+	}
+}
+
+sub get_untracked {
+	my ($self, $ed) = @_;
+	my @out;
+	my $h = $ed->{empty};
+	foreach (sort keys %$h) {
+		my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
+		push @out, " $act: " . uri_encode($_);
+		warn "W: $act: $_\n";
+	}
+	foreach my $t (qw/dir_prop file_prop/) {
+		$h = $ed->{$t} or next;
+		foreach my $path (sort keys %$h) {
+			my $ppath = $path eq '' ? '.' : $path;
+			foreach my $prop (sort keys %{$h->{$path}}) {
+				next if $SKIP_PROP{$prop};
+				my $v = $h->{$path}->{$prop};
+				my $t_ppath_prop = "$t: " .
+						   uri_encode($ppath) . ' ' .
+						   uri_encode($prop);
+				if (defined $v) {
+					push @out, " +$t_ppath_prop " .
+						   uri_encode($v);
+				} else {
+					push @out, " -$t_ppath_prop";
+				}
+			}
+		}
+	}
+	foreach my $t (qw/absent_file absent_directory/) {
+		$h = $ed->{$t} or next;
+		foreach my $parent (sort keys %$h) {
+			foreach my $path (sort @{$h->{$parent}}) {
+				push @out, " $t: " .
+					   uri_encode("$parent/$path");
+				warn "W: $t: $parent/$path ",
+				     "Insufficient permissions?\n";
+			}
+		}
+	}
+	\@out;
+}
+
+# parse_svn_date(DATE)
+# --------------------
+# Given a date (in UTC) from Subversion, return a string in the format
+# "<TZ Offset> <local date/time>" that Git will use.
+#
+# By default the parsed date will be in UTC; if $Git::SVN::_localtime
+# is true we'll convert it to the local timezone instead.
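+#
+# Illustrative example (the date below is made up): Subversion hands us
+#   2011-12-25T12:34:56.000000Z
+# and, by default, this returns "+0000 2011-12-25 12:34:56"; with
+# $Git::SVN::_localtime set, the same moment is formatted in the local
+# timezone, with the zone name (from %Z) in place of "+0000".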
+sub parse_svn_date { + my $date = shift || return '+0000 1970-01-01 00:00:00'; + my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T + (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or + croak "Unable to parse date: $date\n"; + my $parsed_date; # Set next. + + if ($Git::SVN::_localtime) { + # Translate the Subversion datetime to an epoch time. + # Begin by switching ourselves to $date's timezone, UTC. + my $old_env_TZ = $ENV{TZ}; + $ENV{TZ} = 'UTC'; + + my $epoch_in_UTC = + POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900); + + # Determine our local timezone (including DST) at the + # time of $epoch_in_UTC. $Git::SVN::Log::TZ stored the + # value of TZ, if any, at the time we were run. + if (defined $Git::SVN::Log::TZ) { + $ENV{TZ} = $Git::SVN::Log::TZ; + } else { + delete $ENV{TZ}; + } + + my $our_TZ = + POSIX::strftime('%Z', $S, $M, $H, $d, $m - 1, $Y - 1900); + + # This converts $epoch_in_UTC into our local timezone. + my ($sec, $min, $hour, $mday, $mon, $year, + $wday, $yday, $isdst) = localtime($epoch_in_UTC); + + $parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d', + $our_TZ, $year + 1900, $mon + 1, + $mday, $hour, $min, $sec); + + # Reset us to the timezone in effect when we entered + # this routine. + if (defined $old_env_TZ) { + $ENV{TZ} = $old_env_TZ; + } else { + delete $ENV{TZ}; + } + } else { + $parsed_date = "+0000 $Y-$m-$d $H:$M:$S"; + } + + return $parsed_date; +} + +sub other_gs { + my ($self, $new_url, $url, + $branch_from, $r, $old_ref_id) = @_; + my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from); + unless ($gs) { + my $ref_id = $old_ref_id; + $ref_id =~ s/\@\d+-*$//; + $ref_id .= "\@$r"; + # just grow a tail if we're not unique enough :x + $ref_id .= '-' while find_ref($ref_id); + my ($u, $p, $repo_id) = ($new_url, '', $ref_id); + if ($u =~ s#^\Q$url\E(/|$)##) { + $p = $u; + $u = $url; + $repo_id = $self->{repo_id}; + } + while (1) { + # It is possible to tag two different subdirectories at + # the same revision. If the url for an existing ref + # does not match, we must either find a ref with a + # matching url or create a new ref by growing a tail. + $gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1); + my (undef, $max_commit) = $gs->rev_map_max(1); + last if (!$max_commit); + my ($url) = ::cmt_metadata($max_commit); + last if ($url eq $gs->full_url); + $ref_id .= '-'; + } + print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1; + } + $gs +} + +sub call_authors_prog { + my ($orig_author) = @_; + $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author); + my $author = `$::_authors_prog $orig_author`; + if ($? != 0) { + die "$::_authors_prog failed with exit code $?\n" + } + if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) { + my ($name, $email) = ($1, $2); + $email = undef if length $2 == 0; + return [$name, $email]; + } else { + die "Author: $orig_author: $::_authors_prog returned " + . "invalid author format: $author\n"; + } +} + +sub check_author { + my ($author) = @_; + if (!defined $author || length $author == 0) { + $author = '(no author)'; + } + if (!defined $::users{$author}) { + if (defined $::_authors_prog) { + $::users{$author} = call_authors_prog($author); + } elsif (defined $::_authors) { + die "Author: $author not defined in $::_authors file\n"; + } + } + $author; +} + +sub find_extra_svk_parents { + my ($self, $ed, $tickets, $parents) = @_; + # aha! svk:merge property changed... 
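+	# Each svk:merge ticket is one "<uuid>:<path>:<rev>" line, e.g.
+	# "12345678-9abc-def0-1234-56789abcdef0:/branches/topic:1234"
+	# (made-up values, for illustration only).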
+ my @tickets = split "\n", $tickets; + my @known_parents; + for my $ticket ( @tickets ) { + my ($uuid, $path, $rev) = split /:/, $ticket; + if ( $uuid eq $self->ra_uuid ) { + my $url = $self->{url}; + my $repos_root = $url; + my $branch_from = $path; + $branch_from =~ s{^/}{}; + my $gs = $self->other_gs($repos_root."/".$branch_from, + $url, + $branch_from, + $rev, + $self->{ref_id}); + if ( my $commit = $gs->rev_map_get($rev, $uuid) ) { + # wahey! we found it, but it might be + # an old one (!) + push @known_parents, [ $rev, $commit ]; + } + } + } + # Ordering matters; highest-numbered commit merge tickets + # first, as they may account for later merge ticket additions + # or changes. + @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents; + for my $parent ( @known_parents ) { + my @cmd = ('rev-list', $parent, map { "^$_" } @$parents ); + my ($msg_fh, $ctx) = command_output_pipe(@cmd); + my $new; + while ( <$msg_fh> ) { + $new=1;last; + } + command_close_pipe($msg_fh, $ctx); + if ( $new ) { + print STDERR + "Found merge parent (svk:merge ticket): $parent\n"; + push @$parents, $parent; + } + } +} + +sub lookup_svn_merge { + my $uuid = shift; + my $url = shift; + my $merge = shift; + + my ($source, $revs) = split ":", $merge; + my $path = $source; + $path =~ s{^/}{}; + my $gs = Git::SVN->find_by_url($url.$source, $url, $path); + if ( !$gs ) { + warn "Couldn't find revmap for $url$source\n"; + return; + } + my @ranges = split ",", $revs; + my ($tip, $tip_commit); + my @merged_commit_ranges; + # find the tip + for my $range ( @ranges ) { + my ($bottom, $top) = split "-", $range; + $top ||= $bottom; + my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top ); + my $top_commit = $gs->find_rev_before( $top, 1, $bottom ); + + unless ($top_commit and $bottom_commit) { + warn "W:unknown path/rev in svn:mergeinfo " + ."dirprop: $source:$range\n"; + next; + } + + push @merged_commit_ranges, + "$bottom_commit^..$top_commit"; + + if ( !defined $tip or $top > $tip ) { + $tip = $top; + $tip_commit = $top_commit; + } + } + return ($tip_commit, @merged_commit_ranges); +} + +sub _rev_list { + my ($msg_fh, $ctx) = command_output_pipe( + "rev-list", @_, + ); + my @rv; + while ( <$msg_fh> ) { + chomp; + push @rv, $_; + } + command_close_pipe($msg_fh, $ctx); + @rv; +} + +sub check_cherry_pick { + my $base = shift; + my $tip = shift; + my $parents = shift; + my @ranges = @_; + my %commits = map { $_ => 1 } + _rev_list("--no-merges", $tip, "--not", $base, @$parents); + for my $range ( @ranges ) { + delete @commits{_rev_list($range)}; + } + for my $commit (keys %commits) { + if (has_no_changes($commit)) { + delete $commits{$commit}; + } + } + return (keys %commits); +} + +sub has_no_changes { + my $commit = shift; + + my @revs = split / /, command_oneline( + qw(rev-list --parents -1 -m), $commit); + + # Commits with no parents, e.g. the start of a partial branch, + # have changes by definition. + return 1 if (@revs < 2); + + # Commits with multiple parents, e.g a merge, have no changes + # by definition. + return 0 if (@revs > 2); + + return (command_oneline("rev-parse", "$commit^{tree}") eq + command_oneline("rev-parse", "$commit~1^{tree}")); +} + +# The GIT_DIR environment variable is not always set until after the command +# line arguments are processed, so we can't memoize in a BEGIN block. 
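+# (The Memoize::Storable caches set up below are kept under
+# $GIT_DIR/svn/.caches/: lookup_svn_merge.db, check_cherry_pick.db and
+# has_no_changes.db.)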
+{ + my $memoized = 0; + + sub memoize_svn_mergeinfo_functions { + return if $memoized; + $memoized = 1; + + my $cache_path = "$ENV{GIT_DIR}/svn/.caches/"; + mkpath([$cache_path]) unless -d $cache_path; + + tie my %lookup_svn_merge_cache => 'Memoize::Storable', + "$cache_path/lookup_svn_merge.db", 'nstore'; + memoize 'lookup_svn_merge', + SCALAR_CACHE => 'FAULT', + LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache], + ; + + tie my %check_cherry_pick_cache => 'Memoize::Storable', + "$cache_path/check_cherry_pick.db", 'nstore'; + memoize 'check_cherry_pick', + SCALAR_CACHE => 'FAULT', + LIST_CACHE => ['HASH' => \%check_cherry_pick_cache], + ; + + tie my %has_no_changes_cache => 'Memoize::Storable', + "$cache_path/has_no_changes.db", 'nstore'; + memoize 'has_no_changes', + SCALAR_CACHE => ['HASH' => \%has_no_changes_cache], + LIST_CACHE => 'FAULT', + ; + } + + sub unmemoize_svn_mergeinfo_functions { + return if not $memoized; + $memoized = 0; + + Memoize::unmemoize 'lookup_svn_merge'; + Memoize::unmemoize 'check_cherry_pick'; + Memoize::unmemoize 'has_no_changes'; + } +} + +END { + # Force cache writeout explicitly instead of waiting for + # global destruction to avoid segfault in Storable: + # http://rt.cpan.org/Public/Bug/Display.html?id=36087 + unmemoize_svn_mergeinfo_functions(); +} + +sub parents_exclude { + my $parents = shift; + my @commits = @_; + return unless @commits; + + my @excluded; + my $excluded; + do { + my @cmd = ('rev-list', "-1", @commits, "--not", @$parents ); + $excluded = command_oneline(@cmd); + if ( $excluded ) { + my @new; + my $found; + for my $commit ( @commits ) { + if ( $commit eq $excluded ) { + push @excluded, $commit; + $found++; + last; + } + else { + push @new, $commit; + } + } + die "saw commit '$excluded' in rev-list output, " + ."but we didn't ask for that commit (wanted: @commits --not @$parents)" + unless $found; + @commits = @new; + } + } + while ($excluded and @commits); + + return @excluded; +} + + +# note: this function should only be called if the various dirprops +# have actually changed +sub find_extra_svn_parents { + my ($self, $ed, $mergeinfo, $parents) = @_; + # aha! svk:merge property changed... + + memoize_svn_mergeinfo_functions(); + + # We first search for merged tips which are not in our + # history. Then, we figure out which git revisions are in + # that tip, but not this revision. If all of those revisions + # are now marked as merge, we can add the tip as a parent. + my @merges = split "\n", $mergeinfo; + my @merge_tips; + my $url = $self->{url}; + my $uuid = $self->ra_uuid; + my %ranges; + for my $merge ( @merges ) { + my ($tip_commit, @ranges) = + lookup_svn_merge( $uuid, $url, $merge ); + unless (!$tip_commit or + grep { $_ eq $tip_commit } @$parents ) { + push @merge_tips, $tip_commit; + $ranges{$tip_commit} = \@ranges; + } else { + push @merge_tips, undef; + } + } + + my %excluded = map { $_ => 1 } + parents_exclude($parents, grep { defined } @merge_tips); + + # check merge tips for new parents + my @new_parents; + for my $merge_tip ( @merge_tips ) { + my $spec = shift @merges; + next unless $merge_tip and $excluded{$merge_tip}; + + my $ranges = $ranges{$merge_tip}; + + # check out 'new' tips + my $merge_base; + eval { + $merge_base = command_oneline( + "merge-base", + @$parents, $merge_tip, + ); + }; + if ($@) { + die "An error occurred during merge-base" + unless $@->isa("Git::Error::Command"); + + warn "W: Cannot find common ancestor between ". + "@$parents and $merge_tip. 
Ignoring merge info.\n"; + next; + } + + # double check that there are no missing non-merge commits + my (@incomplete) = check_cherry_pick( + $merge_base, $merge_tip, + $parents, + @$ranges, + ); + + if ( @incomplete ) { + warn "W:svn cherry-pick ignored ($spec) - missing " + .@incomplete." commit(s) (eg $incomplete[0])\n"; + } else { + warn + "Found merge parent (svn:mergeinfo prop): ", + $merge_tip, "\n"; + push @new_parents, $merge_tip; + } + } + + # cater for merges which merge commits from multiple branches + if ( @new_parents > 1 ) { + for ( my $i = 0; $i <= $#new_parents; $i++ ) { + for ( my $j = 0; $j <= $#new_parents; $j++ ) { + next if $i == $j; + next unless $new_parents[$i]; + next unless $new_parents[$j]; + my $revs = command_oneline( + "rev-list", "-1", + "$new_parents[$i]..$new_parents[$j]", + ); + if ( !$revs ) { + undef($new_parents[$j]); + } + } + } + } + push @$parents, grep { defined } @new_parents; +} + +sub make_log_entry { + my ($self, $rev, $parents, $ed) = @_; + my $untracked = $self->get_untracked($ed); + + my @parents = @$parents; + my $ps = $ed->{path_strip} || ""; + for my $path ( grep { m/$ps/ } %{$ed->{dir_prop}} ) { + my $props = $ed->{dir_prop}{$path}; + if ( $props->{"svk:merge"} ) { + $self->find_extra_svk_parents + ($ed, $props->{"svk:merge"}, \@parents); + } + if ( $props->{"svn:mergeinfo"} ) { + $self->find_extra_svn_parents + ($ed, + $props->{"svn:mergeinfo"}, + \@parents); + } + } + + open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!; + print $un "r$rev\n" or croak $!; + print $un $_, "\n" foreach @$untracked; + my %log_entry = ( parents => \@parents, revision => $rev, + log => ''); + + my $headrev; + my $logged = delete $self->{logged_rev_props}; + if (!$logged || $self->{-want_revprops}) { + my $rp = $self->ra->rev_proplist($rev); + foreach (sort keys %$rp) { + my $v = $rp->{$_}; + if (/^svn:(author|date|log)$/) { + $log_entry{$1} = $v; + } elsif ($_ eq 'svm:headrev') { + $headrev = $v; + } else { + print $un " rev_prop: ", uri_encode($_), ' ', + uri_encode($v), "\n"; + } + } + } else { + map { $log_entry{$_} = $logged->{$_} } keys %$logged; + } + close $un or croak $!; + + $log_entry{date} = parse_svn_date($log_entry{date}); + $log_entry{log} .= "\n"; + my $author = $log_entry{author} = check_author($log_entry{author}); + my ($name, $email) = defined $::users{$author} ? @{$::users{$author}} + : ($author, undef); + + my ($commit_name, $commit_email) = ($name, $email); + if ($_use_log_author) { + my $name_field; + if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) { + $name_field = $1; + } elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) { + $name_field = $1; + } + if (!defined $name_field) { + if (!defined $email) { + $email = $name; + } + } elsif ($name_field =~ /(.*?)\s+<(.*)>/) { + ($name, $email) = ($1, $2); + } elsif ($name_field =~ /(.*)@/) { + ($name, $email) = ($1, $name_field); + } else { + ($name, $email) = ($name_field, $name_field); + } + } + if (defined $headrev && $self->use_svm_props) { + if ($self->rewrite_root) { + die "Can't have both 'useSvmProps' and 'rewriteRoot' ", + "options set!\n"; + } + if ($self->rewrite_uuid) { + die "Can't have both 'useSvmProps' and 'rewriteUUID' ", + "options set!\n"; + } + my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i; + # we don't want "SVM: initializing mirror for junk" ... 
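+		# ($headrev is a "<uuid>:<rev>" pair as matched above;
+		# made-up example: "12345678-9abc-def0-1234-56789abcdef0:42")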
+ return undef if $r == 0; + my $svm = $self->svm; + if ($uuid ne $svm->{uuid}) { + die "UUID mismatch on SVM path:\n", + "expected: $svm->{uuid}\n", + " got: $uuid\n"; + } + my $full_url = $self->full_url; + $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or + die "Failed to replace '$svm->{replace}' with ", + "'$svm->{source}' in $full_url\n"; + # throw away username for storing in records + remove_username($full_url); + $log_entry{metadata} = "$full_url\@$r $uuid"; + $log_entry{svm_revision} = $r; + $email ||= "$author\@$uuid"; + $commit_email ||= "$author\@$uuid"; + } elsif ($self->use_svnsync_props) { + my $full_url = $self->svnsync->{url}; + $full_url .= "/$self->{path}" if length $self->{path}; + remove_username($full_url); + my $uuid = $self->svnsync->{uuid}; + $log_entry{metadata} = "$full_url\@$rev $uuid"; + $email ||= "$author\@$uuid"; + $commit_email ||= "$author\@$uuid"; + } else { + my $url = $self->metadata_url; + remove_username($url); + my $uuid = $self->rewrite_uuid || $self->ra->get_uuid; + $log_entry{metadata} = "$url\@$rev " . $uuid; + $email ||= "$author\@" . $uuid; + $commit_email ||= "$author\@" . $uuid; + } + $log_entry{name} = $name; + $log_entry{email} = $email; + $log_entry{commit_name} = $commit_name; + $log_entry{commit_email} = $commit_email; + \%log_entry; +} + +sub fetch { + my ($self, $min_rev, $max_rev, @parents) = @_; + my ($last_rev, $last_commit) = $self->last_rev_commit; + my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev); + $self->ra->gs_fetch_loop_common($base, $head, [$self]); +} + +sub set_tree_cb { + my ($self, $log_entry, $tree, $rev, $date, $author) = @_; + $self->{inject_parents} = { $rev => $tree }; + $self->fetch(undef, undef); +} + +sub set_tree { + my ($self, $tree) = (shift, shift); + my $log_entry = ::get_commit_entry($tree); + unless ($self->{last_rev}) { + ::fatal("Must have an existing revision to commit"); + } + my %ed_opts = ( r => $self->{last_rev}, + log => $log_entry->{log}, + ra => $self->ra, + tree_a => $self->{last_commit}, + tree_b => $tree, + editor_cb => sub { + $self->set_tree_cb($log_entry, $tree, @_) }, + svn_path => $self->{path} ); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\nr$self->{last_rev} = $tree\n"; + } +} + +sub rebuild_from_rev_db { + my ($self, $path) = @_; + my $r = -1; + open my $fh, '<', $path or croak "open: $!"; + binmode $fh or croak "binmode: $!"; + while (<$fh>) { + length($_) == 41 or croak "inconsistent size in ($_) != 41"; + chomp($_); + ++$r; + next if $_ eq ('0' x 40); + $self->rev_map_set($r, $_); + print "r$r = $_\n"; + } + close $fh or croak "close: $!"; + unlink $path or croak "unlink: $!"; +} + +sub rebuild { + my ($self) = @_; + my $map_path = $self->map_path; + my $partial = (-e $map_path && ! -z $map_path); + return unless ::verify_ref($self->refname.'^0'); + if (!$partial && ($self->use_svm_props || $self->no_metadata)) { + my $rev_db = $self->rev_db_path; + $self->rebuild_from_rev_db($rev_db); + if ($self->use_svm_props) { + my $svm_rev_db = $self->rev_db_path($self->svm_uuid); + $self->rebuild_from_rev_db($svm_rev_db); + } + $self->unlink_rev_db_symlink; + return; + } + print "Rebuilding $map_path ...\n" if (!$partial); + my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) : + (undef, undef)); + my ($log, $ctx) = + command_output_pipe(qw/rev-list --pretty=raw --no-color --reverse/, + ($head ? "$head.." : "") . 
$self->refname, + '--'); + my $metadata_url = $self->metadata_url; + remove_username($metadata_url); + my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid; + my $c; + while (<$log>) { + if ( m{^commit ($::sha1)$} ) { + $c = $1; + next; + } + next unless s{^\s*(git-svn-id:)}{$1}; + my ($url, $rev, $uuid) = ::extract_metadata($_); + remove_username($url); + + # ignore merges (from set-tree) + next if (!defined $rev || !$uuid); + + # if we merged or otherwise started elsewhere, this is + # how we break out of it + if (($uuid ne $svn_uuid) || + ($metadata_url && $url && ($url ne $metadata_url))) { + next; + } + if ($partial && $head) { + print "Partial-rebuilding $map_path ...\n"; + print "Currently at $base_rev = $head\n"; + $head = undef; + } + + $self->rev_map_set($rev, $c); + print "r$rev = $c\n"; + } + command_close_pipe($log, $ctx); + print "Done rebuilding $map_path\n" if (!$partial || !$head); + my $rev_db_path = $self->rev_db_path; + if (-f $self->rev_db_path) { + unlink $self->rev_db_path or croak "unlink: $!"; + } + $self->unlink_rev_db_symlink; +} + +# rev_map: +# Tie::File seems to be prone to offset errors if revisions get sparse, +# it's not that fast, either. Tie::File is also not in Perl 5.6. So +# one of my favorite modules is out :< Next up would be one of the DBM +# modules, but I'm not sure which is most portable... +# +# This is the replacement for the rev_db format, which was too big +# and inefficient for large repositories with a lot of sparse history +# (mainly tags) +# +# The format is this: +# - 24 bytes for every record, +# * 4 bytes for the integer representing an SVN revision number +# * 20 bytes representing the sha1 of a git commit +# - No empty padding records like the old format +# (except the last record, which can be overwritten) +# - new records are written append-only since SVN revision numbers +# increase monotonically +# - lookups on SVN revision number are done via a binary search +# - Piping the file to xxd -c24 is a good way of dumping it for +# viewing or editing (piped back through xxd -r), should the need +# ever arise. 
+# - The last record can be padding revision with an all-zero sha1 +# This is used to optimize fetch performance when using multiple +# "fetch" directives in .git/config +# +# These files are disposable unless noMetadata or useSvmProps is set + +sub _rev_map_set { + my ($fh, $rev, $commit) = @_; + + binmode $fh or croak "binmode: $!"; + my $size = (stat($fh))[7]; + ($size % 24) == 0 or croak "inconsistent size: $size"; + + my $wr_offset = 0; + if ($size > 0) { + sysseek($fh, -24, SEEK_END) or croak "seek: $!"; + my $read = sysread($fh, my $buf, 24) or croak "read: $!"; + $read == 24 or croak "read only $read bytes (!= 24)"; + my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf); + if ($last_commit eq ('0' x40)) { + if ($size >= 48) { + sysseek($fh, -48, SEEK_END) or croak "seek: $!"; + $read = sysread($fh, $buf, 24) or + croak "read: $!"; + $read == 24 or + croak "read only $read bytes (!= 24)"; + ($last_rev, $last_commit) = + unpack(rev_map_fmt, $buf); + if ($last_commit eq ('0' x40)) { + croak "inconsistent .rev_map\n"; + } + } + if ($last_rev >= $rev) { + croak "last_rev is higher!: $last_rev >= $rev"; + } + $wr_offset = -24; + } + } + sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!"; + syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or + croak "write: $!"; +} + +sub _rev_map_reset { + my ($fh, $rev, $commit) = @_; + my $c = _rev_map_get($fh, $rev); + $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n"; + my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!"; + truncate $fh, $offset or croak "truncate: $!"; +} + +sub mkfile { + my ($path) = @_; + unless (-e $path) { + my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#); + mkpath([$dir]) unless -d $dir; + open my $fh, '>>', $path or die "Couldn't create $path: $!\n"; + close $fh or die "Couldn't close (create) $path: $!\n"; + } +} + +sub rev_map_set { + my ($self, $rev, $commit, $update_ref, $uuid) = @_; + defined $commit or die "missing arg3\n"; + length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n"; + my $db = $self->map_path($uuid); + my $db_lock = "$db.lock"; + my $sig; + $update_ref ||= 0; + if ($update_ref) { + $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} = + $SIG{USR1} = $SIG{USR2} = sub { $sig = $_[0] }; + } + mkfile($db); + + $LOCKFILES{$db_lock} = 1; + my $sync; + # both of these options make our .rev_db file very, very important + # and we can't afford to lose it because rebuild() won't work + if ($self->use_svm_props || $self->no_metadata) { + $sync = 1; + copy($db, $db_lock) or die "rev_map_set(@_): ", + "Failed to copy: ", + "$db => $db_lock ($!)\n"; + } else { + rename $db, $db_lock or die "rev_map_set(@_): ", + "Failed to rename: ", + "$db => $db_lock ($!)\n"; + } + + sysopen(my $fh, $db_lock, O_RDWR | O_CREAT) + or croak "Couldn't open $db_lock: $!\n"; + $update_ref eq 'reset' ? 
_rev_map_reset($fh, $rev, $commit) : + _rev_map_set($fh, $rev, $commit); + if ($sync) { + $fh->flush or die "Couldn't flush $db_lock: $!\n"; + $fh->sync or die "Couldn't sync $db_lock: $!\n"; + } + close $fh or croak $!; + if ($update_ref) { + $_head = $self; + my $note = ""; + $note = " ($update_ref)" if ($update_ref !~ /^\d*$/); + command_noisy('update-ref', '-m', "r$rev$note", + $self->refname, $commit); + } + rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ", + "$db_lock => $db ($!)\n"; + delete $LOCKFILES{$db_lock}; + if ($update_ref) { + $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} = + $SIG{USR1} = $SIG{USR2} = 'DEFAULT'; + kill $sig, $$ if defined $sig; + } +} + +# If want_commit, this will return an array of (rev, commit) where +# commit _must_ be a valid commit in the archive. +# Otherwise, it'll return the max revision (whether or not the +# commit is valid or just a 0x40 placeholder). +sub rev_map_max { + my ($self, $want_commit) = @_; + $self->rebuild; + my ($r, $c) = $self->rev_map_max_norebuild($want_commit); + $want_commit ? ($r, $c) : $r; +} + +sub rev_map_max_norebuild { + my ($self, $want_commit) = @_; + my $map_path = $self->map_path; + stat $map_path or return $want_commit ? (0, undef) : 0; + sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!"; + binmode $fh or croak "binmode: $!"; + my $size = (stat($fh))[7]; + ($size % 24) == 0 or croak "inconsistent size: $size"; + + if ($size == 0) { + close $fh or croak "close: $!"; + return $want_commit ? (0, undef) : 0; + } + + sysseek($fh, -24, SEEK_END) or croak "seek: $!"; + sysread($fh, my $buf, 24) == 24 or croak "read: $!"; + my ($r, $c) = unpack(rev_map_fmt, $buf); + if ($want_commit && $c eq ('0' x40)) { + if ($size < 48) { + return $want_commit ? (0, undef) : 0; + } + sysseek($fh, -48, SEEK_END) or croak "seek: $!"; + sysread($fh, $buf, 24) == 24 or croak "read: $!"; + ($r, $c) = unpack(rev_map_fmt, $buf); + if ($c eq ('0'x40)) { + croak "Penultimate record is all-zeroes in $map_path"; + } + } + close $fh or croak "close: $!"; + $want_commit ? ($r, $c) : $r; +} + +sub rev_map_get { + my ($self, $rev, $uuid) = @_; + my $map_path = $self->map_path($uuid); + return undef unless -e $map_path; + + sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!"; + my $c = _rev_map_get($fh, $rev); + close($fh) or croak "close: $!"; + $c +} + +sub _rev_map_get { + my ($fh, $rev) = @_; + + binmode $fh or croak "binmode: $!"; + my $size = (stat($fh))[7]; + ($size % 24) == 0 or croak "inconsistent size: $size"; + + if ($size == 0) { + return undef; + } + + my ($l, $u) = (0, $size - 24); + my ($r, $c, $buf); + + while ($l <= $u) { + my $i = int(($l/24 + $u/24) / 2) * 24; + sysseek($fh, $i, SEEK_SET) or croak "seek: $!"; + sysread($fh, my $buf, 24) == 24 or croak "read: $!"; + my ($r, $c) = unpack(rev_map_fmt, $buf); + + if ($r < $rev) { + $l = $i + 24; + } elsif ($r > $rev) { + $u = $i - 24; + } else { # $r == $rev + return $c eq ('0' x 40) ? undef : $c; + } + } + undef; +} + +# Finds the first svn revision that exists on (if $eq_ok is true) or +# before $rev for the current branch. It will not search any lower +# than $min_rev. Returns the git commit hash and svn revision number +# if found, else (undef, undef). 
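+#
+# Hypothetical usage (revision number made up):
+#   my ($rev, $commit) = $gs->find_rev_before(1234, 1);
+# gives the closest mapped revision at or below r1234 together with its
+# git commit, or (undef, undef) if nothing that low is in the rev_map.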
+sub find_rev_before { + my ($self, $rev, $eq_ok, $min_rev) = @_; + --$rev unless $eq_ok; + $min_rev ||= 1; + my $max_rev = $self->rev_map_max; + $rev = $max_rev if ($rev > $max_rev); + while ($rev >= $min_rev) { + if (my $c = $self->rev_map_get($rev)) { + return ($rev, $c); + } + --$rev; + } + return (undef, undef); +} + +# Finds the first svn revision that exists on (if $eq_ok is true) or +# after $rev for the current branch. It will not search any higher +# than $max_rev. Returns the git commit hash and svn revision number +# if found, else (undef, undef). +sub find_rev_after { + my ($self, $rev, $eq_ok, $max_rev) = @_; + ++$rev unless $eq_ok; + $max_rev ||= $self->rev_map_max; + while ($rev <= $max_rev) { + if (my $c = $self->rev_map_get($rev)) { + return ($rev, $c); + } + ++$rev; + } + return (undef, undef); +} + +sub _new { + my ($class, $repo_id, $ref_id, $path) = @_; + unless (defined $repo_id && length $repo_id) { + $repo_id = $Git::SVN::default_repo_id; + } + unless (defined $ref_id && length $ref_id) { + $_prefix = '' unless defined($_prefix); + $_[2] = $ref_id = + "refs/remotes/$_prefix$Git::SVN::default_ref_id"; + } + $_[1] = $repo_id; + my $dir = "$ENV{GIT_DIR}/svn/$ref_id"; + + # Older repos imported by us used $GIT_DIR/svn/foo instead of + # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo + if ($ref_id =~ m{^refs/remotes/(.*)}) { + my $old_dir = "$ENV{GIT_DIR}/svn/$1"; + if (-d $old_dir && ! -d $dir) { + $dir = $old_dir; + } + } + + $_[3] = $path = '' unless (defined $path); + mkpath([$dir]); + bless { + ref_id => $ref_id, dir => $dir, index => "$dir/index", + path => $path, config => "$ENV{GIT_DIR}/svn/config", + map_root => "$dir/.rev_map", repo_id => $repo_id }, $class; +} + +# for read-only access of old .rev_db formats +sub unlink_rev_db_symlink { + my ($self) = @_; + my $link = $self->rev_db_path; + $link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link"; + if (-l $link) { + unlink $link or croak "unlink: $link failed!"; + } +} + +sub rev_db_path { + my ($self, $uuid) = @_; + my $db_path = $self->map_path($uuid); + $db_path =~ s{/\.rev_map\.}{/\.rev_db\.} + or croak "map_path: $db_path does not contain '/.rev_map.' !"; + $db_path; +} + +# the new replacement for .rev_db +sub map_path { + my ($self, $uuid) = @_; + $uuid ||= $self->ra_uuid; + "$self->{map_root}.$uuid"; +} + +sub uri_encode { + my ($f) = @_; + $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg; + $f +} + +sub uri_decode { + my ($f) = @_; + $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg; + $f +} + +sub remove_username { + $_[0] =~ s{^([^:]*://)[^@]+@}{$1}; +} + +package Git::SVN::Prompt; +use strict; +use warnings; +require SVN::Core; +use vars qw/$_no_auth_cache $_username/; + +sub simple { + my ($cred, $realm, $default_username, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + $default_username = $_username if defined $_username; + if (defined $default_username && length $default_username) { + if (defined $realm && length $realm) { + print STDERR "Authentication realm: $realm\n"; + STDERR->flush; + } + $cred->username($default_username); + } else { + username($cred, $realm, $may_save, $pool); + } + $cred->password(_read_password("Password for '" . + $cred->username . 
"': ", $realm)); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub ssl_server_trust { + my ($cred, $realm, $failures, $cert_info, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + print STDERR "Error validating server certificate for '$realm':\n"; + { + no warnings 'once'; + # All variables SVN::Auth::SSL::* are used only once, + # so we're shutting up Perl warnings about this. + if ($failures & $SVN::Auth::SSL::UNKNOWNCA) { + print STDERR " - The certificate is not issued ", + "by a trusted authority. Use the\n", + " fingerprint to validate ", + "the certificate manually!\n"; + } + if ($failures & $SVN::Auth::SSL::CNMISMATCH) { + print STDERR " - The certificate hostname ", + "does not match.\n"; + } + if ($failures & $SVN::Auth::SSL::NOTYETVALID) { + print STDERR " - The certificate is not yet valid.\n"; + } + if ($failures & $SVN::Auth::SSL::EXPIRED) { + print STDERR " - The certificate has expired.\n"; + } + if ($failures & $SVN::Auth::SSL::OTHER) { + print STDERR " - The certificate has ", + "an unknown error.\n"; + } + } # no warnings 'once' + printf STDERR + "Certificate information:\n". + " - Hostname: %s\n". + " - Valid: from %s until %s\n". + " - Issuer: %s\n". + " - Fingerprint: %s\n", + map $cert_info->$_, qw(hostname valid_from valid_until + issuer_dname fingerprint); + my $choice; +prompt: + print STDERR $may_save ? + "(R)eject, accept (t)emporarily or accept (p)ermanently? " : + "(R)eject or accept (t)emporarily? "; + STDERR->flush; + $choice = lc(substr( || 'R', 0, 1)); + if ($choice =~ /^t$/i) { + $cred->may_save(undef); + } elsif ($choice =~ /^r$/i) { + return -1; + } elsif ($may_save && $choice =~ /^p$/i) { + $cred->may_save($may_save); + } else { + goto prompt; + } + $cred->accepted_failures($failures); + $SVN::_Core::SVN_NO_ERROR; +} + +sub ssl_client_cert { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + print STDERR "Client certificate filename: "; + STDERR->flush; + chomp(my $filename = ); + $cred->cert_file($filename); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub ssl_client_cert_pw { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + $cred->password(_read_password("Password: ", $realm)); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub username { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + if (defined $realm && length $realm) { + print STDERR "Authentication realm: $realm\n"; + } + my $username; + if (defined $_username) { + $username = $_username; + } else { + print STDERR "Username: "; + STDERR->flush; + chomp($username = ); + } + $cred->username($username); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _read_password { + my ($prompt, $realm) = @_; + my $password = ''; + if (exists $ENV{GIT_ASKPASS}) { + open(PH, "-|", $ENV{GIT_ASKPASS}, $prompt); + $password = ; + $password =~ s/[\012\015]//; # \n\r + close(PH); + } else { + print STDERR $prompt; + STDERR->flush; + require Term::ReadKey; + Term::ReadKey::ReadMode('noecho'); + while (defined(my $key = Term::ReadKey::ReadKey(0))) { + last if $key =~ /[\012\015]/; # \n\r + $password .= $key; + } + Term::ReadKey::ReadMode('restore'); + print STDERR "\n"; + STDERR->flush; + } + $password; +} + +package SVN::Git::Fetcher; +use vars qw/@ISA/; +use strict; +use warnings; +use Carp qw/croak/; +use IO::File qw//; +use vars qw/$_ignore_regex/; + +# file baton members: path, mode_a, mode_b, pool, fh, blob, 
base +sub new { + my ($class, $git_svn, $switch_path) = @_; + my $self = SVN::Delta::Editor->new; + bless $self, $class; + if (exists $git_svn->{last_commit}) { + $self->{c} = $git_svn->{last_commit}; + $self->{empty_symlinks} = + _mark_empty_symlinks($git_svn, $switch_path); + } + $self->{ignore_regex} = eval { command_oneline('config', '--get', + "svn-remote.$git_svn->{repo_id}.ignore-paths") }; + $self->{empty} = {}; + $self->{dir_prop} = {}; + $self->{file_prop} = {}; + $self->{absent_dir} = {}; + $self->{absent_file} = {}; + $self->{gii} = $git_svn->tmp_index_do(sub { Git::IndexInfo->new }); + $self->{pathnameencoding} = Git::config('svn.pathnameencoding'); + $self; +} + +# this uses the Ra object, so it must be called before do_{switch,update}, +# not inside them (when the Git::SVN::Fetcher object is passed) to +# do_{switch,update} +sub _mark_empty_symlinks { + my ($git_svn, $switch_path) = @_; + my $bool = Git::config_bool('svn.brokenSymlinkWorkaround'); + return {} if (!defined($bool)) || (defined($bool) && ! $bool); + + my %ret; + my ($rev, $cmt) = $git_svn->last_rev_commit; + return {} unless ($rev && $cmt); + + # allow the warning to be printed for each revision we fetch to + # ensure the user sees it. The user can also disable the workaround + # on the repository even while git svn is running and the next + # revision fetched will skip this expensive function. + my $printed_warning; + chomp(my $empty_blob = `git hash-object -t blob --stdin < /dev/null`); + my ($ls, $ctx) = command_output_pipe(qw/ls-tree -r -z/, $cmt); + local $/ = "\0"; + my $pfx = defined($switch_path) ? $switch_path : $git_svn->{path}; + $pfx .= '/' if length($pfx); + while (<$ls>) { + chomp; + s/\A100644 blob $empty_blob\t//o or next; + unless ($printed_warning) { + print STDERR "Scanning for empty symlinks, ", + "this may take a while if you have ", + "many empty files\n", + "You may disable this with `", + "git config svn.brokenSymlinkWorkaround ", + "false'.\n", + "This may be done in a different ", + "terminal without restarting ", + "git svn\n"; + $printed_warning = 1; + } + my $path = $_; + my (undef, $props) = + $git_svn->ra->get_file($pfx.$path, $rev, undef); + if ($props->{'svn:special'}) { + $ret{$path} = 1; + } + } + command_close_pipe($ls, $ctx); + \%ret; +} + +# returns true if a given path is inside a ".git" directory +sub in_dot_git { + $_[0] =~ m{(?:^|/)\.git(?:/|$)}; +} + +# return value: 0 -- don't ignore, 1 -- ignore +sub is_path_ignored { + my ($self, $path) = @_; + return 1 if in_dot_git($path); + return 1 if defined($self->{ignore_regex}) && + $path =~ m!$self->{ignore_regex}!; + return 0 unless defined($_ignore_regex); + return 1 if $path =~ m!$_ignore_regex!o; + return 0; +} + +sub set_path_strip { + my ($self, $path) = @_; + $self->{path_strip} = qr/^\Q$path\E(\/|$)/ if length $path; +} + +sub open_root { + { path => '' }; +} + +sub open_directory { + my ($self, $path, $pb, $rev) = @_; + { path => $path }; +} + +sub git_path { + my ($self, $path) = @_; + if (my $enc = $self->{pathnameencoding}) { + require Encode; + Encode::from_to($path, 'UTF-8', $enc); + } + if ($self->{path_strip}) { + $path =~ s!$self->{path_strip}!! or + die "Failed to strip path '$path' ($self->{path_strip})\n"; + } + $path; +} + +sub delete_entry { + my ($self, $path, $rev, $pb) = @_; + return undef if $self->is_path_ignored($path); + + my $gpath = $self->git_path($path); + return undef if ($gpath eq ''); + + # remove entire directories. 
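+	# Descriptive note (added here for clarity, not in upstream git-svn):
+	# SVN reports a directory deletion as a single delete_entry() call, but
+	# the git index only tracks files.  The block below therefore ls-tree's
+	# the path in the last imported commit and, if it is a tree, removes
+	# every file underneath it from the index (printing one "D" line per
+	# file); a plain file is simply removed directly.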
+ my ($tree) = (command('ls-tree', '-z', $self->{c}, "./$gpath") + =~ /\A040000 tree ([a-f\d]{40})\t\Q$gpath\E\0/); + if ($tree) { + my ($ls, $ctx) = command_output_pipe(qw/ls-tree + -r --name-only -z/, + $tree); + local $/ = "\0"; + while (<$ls>) { + chomp; + my $rmpath = "$gpath/$_"; + $self->{gii}->remove($rmpath); + print "\tD\t$rmpath\n" unless $::_q; + } + print "\tD\t$gpath/\n" unless $::_q; + command_close_pipe($ls, $ctx); + } else { + $self->{gii}->remove($gpath); + print "\tD\t$gpath\n" unless $::_q; + } + $self->{empty}->{$path} = 0; + undef; +} + +sub open_file { + my ($self, $path, $pb, $rev) = @_; + my ($mode, $blob); + + goto out if $self->is_path_ignored($path); + + my $gpath = $self->git_path($path); + ($mode, $blob) = (command('ls-tree', '-z', $self->{c}, "./$gpath") + =~ /\A(\d{6}) blob ([a-f\d]{40})\t\Q$gpath\E\0/); + unless (defined $mode && defined $blob) { + die "$path was not found in commit $self->{c} (r$rev)\n"; + } + if ($mode eq '100644' && $self->{empty_symlinks}->{$path}) { + $mode = '120000'; + } +out: + { path => $path, mode_a => $mode, mode_b => $mode, blob => $blob, + pool => SVN::Pool->new, action => 'M' }; +} + +sub add_file { + my ($self, $path, $pb, $cp_path, $cp_rev) = @_; + my $mode; + + if (!$self->is_path_ignored($path)) { + my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#); + delete $self->{empty}->{$dir}; + $mode = '100644'; + } + { path => $path, mode_a => $mode, mode_b => $mode, + pool => SVN::Pool->new, action => 'A' }; +} + +sub add_directory { + my ($self, $path, $cp_path, $cp_rev) = @_; + goto out if $self->is_path_ignored($path); + my $gpath = $self->git_path($path); + if ($gpath eq '') { + my ($ls, $ctx) = command_output_pipe(qw/ls-tree + -r --name-only -z/, + $self->{c}); + local $/ = "\0"; + while (<$ls>) { + chomp; + $self->{gii}->remove($_); + print "\tD\t$_\n" unless $::_q; + } + command_close_pipe($ls, $ctx); + $self->{empty}->{$path} = 0; + } + my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#); + delete $self->{empty}->{$dir}; + $self->{empty}->{$path} = 1; +out: + { path => $path }; +} + +sub change_dir_prop { + my ($self, $db, $prop, $value) = @_; + return undef if $self->is_path_ignored($db->{path}); + $self->{dir_prop}->{$db->{path}} ||= {}; + $self->{dir_prop}->{$db->{path}}->{$prop} = $value; + undef; +} + +sub absent_directory { + my ($self, $path, $pb) = @_; + return undef if $self->is_path_ignored($path); + $self->{absent_dir}->{$pb->{path}} ||= []; + push @{$self->{absent_dir}->{$pb->{path}}}, $path; + undef; +} + +sub absent_file { + my ($self, $path, $pb) = @_; + return undef if $self->is_path_ignored($path); + $self->{absent_file}->{$pb->{path}} ||= []; + push @{$self->{absent_file}->{$pb->{path}}}, $path; + undef; +} + +sub change_file_prop { + my ($self, $fb, $prop, $value) = @_; + return undef if $self->is_path_ignored($fb->{path}); + if ($prop eq 'svn:executable') { + if ($fb->{mode_b} != 120000) { + $fb->{mode_b} = defined $value ? 100755 : 100644; + } + } elsif ($prop eq 'svn:special') { + $fb->{mode_b} = defined $value ? 
120000 : 100644; + } else { + $self->{file_prop}->{$fb->{path}} ||= {}; + $self->{file_prop}->{$fb->{path}}->{$prop} = $value; + } + undef; +} + +sub apply_textdelta { + my ($self, $fb, $exp) = @_; + return undef if $self->is_path_ignored($fb->{path}); + my $fh = $::_repository->temp_acquire('svn_delta'); + # $fh gets auto-closed() by SVN::TxDelta::apply(), + # (but $base does not,) so dup() it for reading in close_file + open my $dup, '<&', $fh or croak $!; + my $base = $::_repository->temp_acquire('git_blob'); + + if ($fb->{blob}) { + my ($base_is_link, $size); + + if ($fb->{mode_a} eq '120000' && + ! $self->{empty_symlinks}->{$fb->{path}}) { + print $base 'link ' or die "print $!\n"; + $base_is_link = 1; + } + retry: + $size = $::_repository->cat_blob($fb->{blob}, $base); + die "Failed to read object $fb->{blob}" if ($size < 0); + + if (defined $exp) { + seek $base, 0, 0 or croak $!; + my $got = ::md5sum($base); + if ($got ne $exp) { + my $err = "Checksum mismatch: ". + "$fb->{path} $fb->{blob}\n" . + "expected: $exp\n" . + " got: $got\n"; + if ($base_is_link) { + warn $err, + "Retrying... (possibly ", + "a bad symlink from SVN)\n"; + $::_repository->temp_reset($base); + $base_is_link = 0; + goto retry; + } + die $err; + } + } + } + seek $base, 0, 0 or croak $!; + $fb->{fh} = $fh; + $fb->{base} = $base; + [ SVN::TxDelta::apply($base, $dup, undef, $fb->{path}, $fb->{pool}) ]; +} + +sub close_file { + my ($self, $fb, $exp) = @_; + return undef if $self->is_path_ignored($fb->{path}); + + my $hash; + my $path = $self->git_path($fb->{path}); + if (my $fh = $fb->{fh}) { + if (defined $exp) { + seek($fh, 0, 0) or croak $!; + my $got = ::md5sum($fh); + if ($got ne $exp) { + die "Checksum mismatch: $path\n", + "expected: $exp\n got: $got\n"; + } + } + if ($fb->{mode_b} == 120000) { + sysseek($fh, 0, 0) or croak $!; + my $rd = sysread($fh, my $buf, 5); + + if (!defined $rd) { + croak "sysread: $!\n"; + } elsif ($rd == 0) { + warn "$path has mode 120000", + " but it points to nothing\n", + "converting to an empty file with mode", + " 100644\n"; + $fb->{mode_b} = '100644'; + } elsif ($buf ne 'link ') { + warn "$path has mode 120000", + " but is not a link\n"; + } else { + my $tmp_fh = $::_repository->temp_acquire( + 'svn_hash'); + my $res; + while ($res = sysread($fh, my $str, 1024)) { + my $out = syswrite($tmp_fh, $str, $res); + defined($out) && $out == $res + or croak("write ", + Git::temp_path($tmp_fh), + ": $!\n"); + } + defined $res or croak $!; + + ($fh, $tmp_fh) = ($tmp_fh, $fh); + Git::temp_release($tmp_fh, 1); + } + } + + $hash = $::_repository->hash_and_insert_object( + Git::temp_path($fh)); + $hash =~ /^[a-f\d]{40}$/ or die "not a sha1: $hash\n"; + + Git::temp_release($fb->{base}, 1); + Git::temp_release($fh, 1); + } else { + $hash = $fb->{blob} or die "no blob information\n"; + } + $fb->{pool}->clear; + $self->{gii}->update($fb->{mode_b}, $hash, $path) or croak $!; + print "\t$fb->{action}\t$path\n" if $fb->{action} && ! 
$::_q; + undef; +} + +sub abort_edit { + my $self = shift; + $self->{nr} = $self->{gii}->{nr}; + delete $self->{gii}; + $self->SUPER::abort_edit(@_); +} + +sub close_edit { + my $self = shift; + $self->{git_commit_ok} = 1; + $self->{nr} = $self->{gii}->{nr}; + delete $self->{gii}; + $self->SUPER::close_edit(@_); +} + +package SVN::Git::Editor; +use vars qw/@ISA $_rmdir $_cp_similarity $_find_copies_harder $_rename_limit/; +use strict; +use warnings; +use Carp qw/croak/; +use IO::File; + +sub new { + my ($class, $opts) = @_; + foreach (qw/svn_path r ra tree_a tree_b log editor_cb/) { + die "$_ required!\n" unless (defined $opts->{$_}); + } + + my $pool = SVN::Pool->new; + my $mods = generate_diff($opts->{tree_a}, $opts->{tree_b}); + my $types = check_diff_paths($opts->{ra}, $opts->{svn_path}, + $opts->{r}, $mods); + + # $opts->{ra} functions should not be used after this: + my @ce = $opts->{ra}->get_commit_editor($opts->{log}, + $opts->{editor_cb}, $pool); + my $self = SVN::Delta::Editor->new(@ce, $pool); + bless $self, $class; + foreach (qw/svn_path r tree_a tree_b/) { + $self->{$_} = $opts->{$_}; + } + $self->{url} = $opts->{ra}->{url}; + $self->{mods} = $mods; + $self->{types} = $types; + $self->{pool} = $pool; + $self->{bat} = { '' => $self->open_root($self->{r}, $self->{pool}) }; + $self->{rm} = { }; + $self->{path_prefix} = length $self->{svn_path} ? + "$self->{svn_path}/" : ''; + $self->{config} = $opts->{config}; + $self->{mergeinfo} = $opts->{mergeinfo}; + return $self; +} + +sub generate_diff { + my ($tree_a, $tree_b) = @_; + my @diff_tree = qw(diff-tree -z -r); + if ($_cp_similarity) { + push @diff_tree, "-C$_cp_similarity"; + } else { + push @diff_tree, '-C'; + } + push @diff_tree, '--find-copies-harder' if $_find_copies_harder; + push @diff_tree, "-l$_rename_limit" if defined $_rename_limit; + push @diff_tree, $tree_a, $tree_b; + my ($diff_fh, $ctx) = command_output_pipe(@diff_tree); + local $/ = "\0"; + my $state = 'meta'; + my @mods; + while (<$diff_fh>) { + chomp $_; # this gets rid of the trailing "\0" + if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s + ($::sha1)\s($::sha1)\s + ([MTCRAD])\d*$/xo) { + push @mods, { mode_a => $1, mode_b => $2, + sha1_a => $3, sha1_b => $4, + chg => $5 }; + if ($5 =~ /^(?:C|R)$/) { + $state = 'file_a'; + } else { + $state = 'file_b'; + } + } elsif ($state eq 'file_a') { + my $x = $mods[$#mods] or croak "Empty array\n"; + if ($x->{chg} !~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + $x->{file_a} = $_; + $state = 'file_b'; + } elsif ($state eq 'file_b') { + my $x = $mods[$#mods] or croak "Empty array\n"; + if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + $x->{file_b} = $_; + $state = 'meta'; + } else { + croak "Error parsing $_\n"; + } + } + command_close_pipe($diff_fh, $ctx); + \@mods; +} + +sub check_diff_paths { + my ($ra, $pfx, $rev, $mods) = @_; + my %types; + $pfx .= '/' if length $pfx; + + sub type_diff_paths { + my ($ra, $types, $path, $rev) = @_; + my @p = split m#/+#, $path; + my $c = shift @p; + unless (defined $types->{$c}) { + $types->{$c} = $ra->check_path($c, $rev); + } + while (@p) { + $c .= '/' . 
shift @p; + next if defined $types->{$c}; + $types->{$c} = $ra->check_path($c, $rev); + } + } + + foreach my $m (@$mods) { + foreach my $f (qw/file_a file_b/) { + next unless defined $m->{$f}; + my ($dir) = ($m->{$f} =~ m#^(.*?)/?(?:[^/]+)$#); + if (length $pfx.$dir && ! defined $types{$dir}) { + type_diff_paths($ra, \%types, $pfx.$dir, $rev); + } + } + } + \%types; +} + +sub split_path { + return ($_[0] =~ m#^(.*?)/?([^/]+)$#); +} + +sub repo_path { + my ($self, $path) = @_; + if (my $enc = $self->{pathnameencoding}) { + require Encode; + Encode::from_to($path, $enc, 'UTF-8'); + } + $self->{path_prefix}.(defined $path ? $path : ''); +} + +sub url_path { + my ($self, $path) = @_; + if ($self->{url} =~ m#^https?://#) { + $path =~ s!([^~a-zA-Z0-9_./-])!uc sprintf("%%%02x",ord($1))!eg; + } + $self->{url} . '/' . $self->repo_path($path); +} + +sub rmdirs { + my ($self) = @_; + my $rm = $self->{rm}; + delete $rm->{''}; # we never delete the url we're tracking + return unless %$rm; + + foreach (keys %$rm) { + my @d = split m#/#, $_; + my $c = shift @d; + $rm->{$c} = 1; + while (@d) { + $c .= '/' . shift @d; + $rm->{$c} = 1; + } + } + delete $rm->{$self->{svn_path}}; + delete $rm->{''}; # we never delete the url we're tracking + return unless %$rm; + + my ($fh, $ctx) = command_output_pipe(qw/ls-tree --name-only -r -z/, + $self->{tree_b}); + local $/ = "\0"; + while (<$fh>) { + chomp; + my @dn = split m#/#, $_; + while (pop @dn) { + delete $rm->{join '/', @dn}; + } + unless (%$rm) { + close $fh; + return; + } + } + command_close_pipe($fh, $ctx); + + my ($r, $p, $bat) = ($self->{r}, $self->{pool}, $self->{bat}); + foreach my $d (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$rm) { + $self->close_directory($bat->{$d}, $p); + my ($dn) = ($d =~ m#^(.*?)/?(?:[^/]+)$#); + print "\tD+\t$d/\n" unless $::_q; + $self->SUPER::delete_entry($d, $r, $bat->{$dn}, $p); + delete $bat->{$d}; + } +} + +sub open_or_add_dir { + my ($self, $full_path, $baton) = @_; + my $t = $self->{types}->{$full_path}; + if (!defined $t) { + die "$full_path not known in r$self->{r} or we have a bug!\n"; + } + { + no warnings 'once'; + # SVN::Node::none and SVN::Node::file are used only once, + # so we're shutting up Perl's warnings about them. + if ($t == $SVN::Node::none) { + return $self->add_directory($full_path, $baton, + undef, -1, $self->{pool}); + } elsif ($t == $SVN::Node::dir) { + return $self->open_directory($full_path, $baton, + $self->{r}, $self->{pool}); + } # no warnings 'once' + print STDERR "$full_path already exists in repository at ", + "r$self->{r} and it is not a directory (", + ($t == $SVN::Node::file ? 'file' : 'unknown'),"/$t)\n"; + } # no warnings 'once' + exit 1; +} + +sub ensure_path { + my ($self, $path) = @_; + my $bat = $self->{bat}; + my $repo_path = $self->repo_path($path); + return $bat->{''} unless (length $repo_path); + my @p = split m#/+#, $repo_path; + my $c = shift @p; + $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{''}); + while (@p) { + my $c0 = $c; + $c .= '/' . shift @p; + $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{$c0}); + } + return $bat->{$c}; +} + +# Subroutine to convert a globbing pattern to a regular expression. +# From perl cookbook. +sub glob2pat { + my $globstr = shift; + my %patmap = ('*' => '.*', '?' => '.', '[' => '[', ']' => ']'); + $globstr =~ s{(.)} { $patmap{$1} || "\Q$1" }ge; + return '^' . $globstr . '$'; +} + +sub check_autoprop { + my ($self, $pattern, $properties, $file, $fbat) = @_; + # Convert the globbing pattern to a regular expression. 
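+	# Worked example (illustrative, not in upstream git-svn): for an
+	# [auto-props] entry such as "*.png = svn:mime-type=image/png",
+	# glob2pat('*.png') yields '^.*\.png$', so a newly added 'logo.png'
+	# matches below and gets svn:mime-type set via change_file_prop().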
+ my $regex = glob2pat($pattern); + # Check if the pattern matches the file name. + if($file =~ m/($regex)/) { + # Parse the list of properties to set. + my @props = split(/;/, $properties); + foreach my $prop (@props) { + # Parse 'name=value' syntax and set the property. + if ($prop =~ /([^=]+)=(.*)/) { + my ($n,$v) = ($1,$2); + for ($n, $v) { + s/^\s+//; s/\s+$//; + } + $self->change_file_prop($fbat, $n, $v); + } + } + } +} + +sub apply_autoprops { + my ($self, $file, $fbat) = @_; + my $conf_t = ${$self->{config}}{'config'}; + no warnings 'once'; + # Check [miscellany]/enable-auto-props in svn configuration. + if (SVN::_Core::svn_config_get_bool( + $conf_t, + $SVN::_Core::SVN_CONFIG_SECTION_MISCELLANY, + $SVN::_Core::SVN_CONFIG_OPTION_ENABLE_AUTO_PROPS, + 0)) { + # Auto-props are enabled. Enumerate them to look for matches. + my $callback = sub { + $self->check_autoprop($_[0], $_[1], $file, $fbat); + }; + SVN::_Core::svn_config_enumerate( + $conf_t, + $SVN::_Core::SVN_CONFIG_SECTION_AUTO_PROPS, + $callback); + } +} + +sub A { + my ($self, $m) = @_; + my ($dir, $file) = split_path($m->{file_b}); + my $pbat = $self->ensure_path($dir); + my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, + undef, -1); + print "\tA\t$m->{file_b}\n" unless $::_q; + $self->apply_autoprops($file, $fbat); + $self->chg_file($fbat, $m); + $self->close_file($fbat,undef,$self->{pool}); +} + +sub C { + my ($self, $m) = @_; + my ($dir, $file) = split_path($m->{file_b}); + my $pbat = $self->ensure_path($dir); + my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, + $self->url_path($m->{file_a}), $self->{r}); + print "\tC\t$m->{file_a} => $m->{file_b}\n" unless $::_q; + $self->chg_file($fbat, $m); + $self->close_file($fbat,undef,$self->{pool}); +} + +sub delete_entry { + my ($self, $path, $pbat) = @_; + my $rpath = $self->repo_path($path); + my ($dir, $file) = split_path($rpath); + $self->{rm}->{$dir} = 1; + $self->SUPER::delete_entry($rpath, $self->{r}, $pbat, $self->{pool}); +} + +sub R { + my ($self, $m) = @_; + my ($dir, $file) = split_path($m->{file_b}); + my $pbat = $self->ensure_path($dir); + my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, + $self->url_path($m->{file_a}), $self->{r}); + print "\tR\t$m->{file_a} => $m->{file_b}\n" unless $::_q; + $self->apply_autoprops($file, $fbat); + $self->chg_file($fbat, $m); + $self->close_file($fbat,undef,$self->{pool}); + + ($dir, $file) = split_path($m->{file_a}); + $pbat = $self->ensure_path($dir); + $self->delete_entry($m->{file_a}, $pbat); +} + +sub M { + my ($self, $m) = @_; + my ($dir, $file) = split_path($m->{file_b}); + my $pbat = $self->ensure_path($dir); + my $fbat = $self->open_file($self->repo_path($m->{file_b}), + $pbat,$self->{r},$self->{pool}); + print "\t$m->{chg}\t$m->{file_b}\n" unless $::_q; + $self->chg_file($fbat, $m); + $self->close_file($fbat,undef,$self->{pool}); +} + +sub T { shift->M(@_) } + +sub change_file_prop { + my ($self, $fbat, $pname, $pval) = @_; + $self->SUPER::change_file_prop($fbat, $pname, $pval, $self->{pool}); +} + +sub change_dir_prop { + my ($self, $pbat, $pname, $pval) = @_; + $self->SUPER::change_dir_prop($pbat, $pname, $pval, $self->{pool}); +} + +sub _chg_file_get_blob ($$$$) { + my ($self, $fbat, $m, $which) = @_; + my $fh = $::_repository->temp_acquire("git_blob_$which"); + if ($m->{"mode_$which"} =~ /^120/) { + print $fh 'link ' or croak $!; + $self->change_file_prop($fbat,'svn:special','*'); + } elsif ($m->{mode_a} =~ /^120/ && $m->{"mode_$which"} !~ /^120/) { + 
$self->change_file_prop($fbat,'svn:special',undef); + } + my $blob = $m->{"sha1_$which"}; + return ($fh,) if ($blob =~ /^0{40}$/); + my $size = $::_repository->cat_blob($blob, $fh); + croak "Failed to read object $blob" if ($size < 0); + $fh->flush == 0 or croak $!; + seek $fh, 0, 0 or croak $!; + + my $exp = ::md5sum($fh); + seek $fh, 0, 0 or croak $!; + return ($fh, $exp); +} + +sub chg_file { + my ($self, $fbat, $m) = @_; + if ($m->{mode_b} =~ /755$/ && $m->{mode_a} !~ /755$/) { + $self->change_file_prop($fbat,'svn:executable','*'); + } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) { + $self->change_file_prop($fbat,'svn:executable',undef); + } + my ($fh_a, $exp_a) = _chg_file_get_blob $self, $fbat, $m, 'a'; + my ($fh_b, $exp_b) = _chg_file_get_blob $self, $fbat, $m, 'b'; + my $pool = SVN::Pool->new; + my $atd = $self->apply_textdelta($fbat, $exp_a, $pool); + if (-s $fh_a) { + my $txstream = SVN::TxDelta::new ($fh_a, $fh_b, $pool); + my $res = SVN::TxDelta::send_txstream($txstream, @$atd, $pool); + if (defined $res) { + die "Unexpected result from send_txstream: $res\n", + "(SVN::Core::VERSION: $SVN::Core::VERSION)\n"; + } + } else { + my $got = SVN::TxDelta::send_stream($fh_b, @$atd, $pool); + die "Checksum mismatch\nexpected: $exp_b\ngot: $got\n" + if ($got ne $exp_b); + } + Git::temp_release($fh_b, 1); + Git::temp_release($fh_a, 1); + $pool->clear; +} + +sub D { + my ($self, $m) = @_; + my ($dir, $file) = split_path($m->{file_b}); + my $pbat = $self->ensure_path($dir); + print "\tD\t$m->{file_b}\n" unless $::_q; + $self->delete_entry($m->{file_b}, $pbat); +} + +sub close_edit { + my ($self) = @_; + my ($p,$bat) = ($self->{pool}, $self->{bat}); + foreach (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$bat) { + next if $_ eq ''; + $self->close_directory($bat->{$_}, $p); + } + $self->close_directory($bat->{''}, $p); + $self->SUPER::close_edit($p); + $p->clear; +} + +sub abort_edit { + my ($self) = @_; + $self->SUPER::abort_edit($self->{pool}); +} + +sub DESTROY { + my $self = shift; + $self->SUPER::DESTROY(@_); + $self->{pool}->clear; +} + +# this drives the editor +sub apply_diff { + my ($self) = @_; + my $mods = $self->{mods}; + my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 ); + foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) { + my $f = $m->{chg}; + if (defined $o{$f}) { + $self->$f($m); + } else { + fatal("Invalid change type: $f"); + } + } + + if (defined($self->{mergeinfo})) { + $self->change_dir_prop($self->{bat}{''}, "svn:mergeinfo", + $self->{mergeinfo}); + } + $self->rmdirs if $_rmdir; + if (@$mods == 0) { + $self->abort_edit; + } else { + $self->close_edit; + } + return scalar @$mods; +} + +package Git::SVN::Ra; +use vars qw/@ISA $config_dir $_log_window_size/; +use strict; +use warnings; +my ($ra_invalid, $can_do_switch, %ignored_err, $RA); + +BEGIN { + # enforce temporary pool usage for some simple functions + no strict 'refs'; + for my $f (qw/rev_proplist get_latest_revnum get_uuid get_repos_root + get_file/) { + my $SUPER = "SUPER::$f"; + *$f = sub { + my $self = shift; + my $pool = SVN::Pool->new; + my @ret = $self->$SUPER(@_,$pool); + $pool->clear; + wantarray ? 
@ret : $ret[0]; + }; + } +} + +sub _auth_providers () { + [ + SVN::Client::get_simple_provider(), + SVN::Client::get_ssl_server_trust_file_provider(), + SVN::Client::get_simple_prompt_provider( + \&Git::SVN::Prompt::simple, 2), + SVN::Client::get_ssl_client_cert_file_provider(), + SVN::Client::get_ssl_client_cert_prompt_provider( + \&Git::SVN::Prompt::ssl_client_cert, 2), + SVN::Client::get_ssl_client_cert_pw_file_provider(), + SVN::Client::get_ssl_client_cert_pw_prompt_provider( + \&Git::SVN::Prompt::ssl_client_cert_pw, 2), + SVN::Client::get_username_provider(), + SVN::Client::get_ssl_server_trust_prompt_provider( + \&Git::SVN::Prompt::ssl_server_trust), + SVN::Client::get_username_prompt_provider( + \&Git::SVN::Prompt::username, 2) + ] +} + +sub escape_uri_only { + my ($uri) = @_; + my @tmp; + foreach (split m{/}, $uri) { + s/([^~\w.%+-]|%(?![a-fA-F0-9]{2}))/sprintf("%%%02X",ord($1))/eg; + push @tmp, $_; + } + join('/', @tmp); +} + +sub escape_url { + my ($url) = @_; + if ($url =~ m#^(https?)://([^/]+)(.*)$#) { + my ($scheme, $domain, $uri) = ($1, $2, escape_uri_only($3)); + $url = "$scheme://$domain$uri"; + } + $url; +} + +sub new { + my ($class, $url) = @_; + $url =~ s!/+$!!; + return $RA if ($RA && $RA->{url} eq $url); + + ::_req_svn(); + + SVN::_Core::svn_config_ensure($config_dir, undef); + my ($baton, $callbacks) = SVN::Core::auth_open_helper(_auth_providers); + my $config = SVN::Core::config_get_config($config_dir); + $RA = undef; + my $dont_store_passwords = 1; + my $conf_t = ${$config}{'config'}; + { + no warnings 'once'; + # The usage of $SVN::_Core::SVN_CONFIG_* variables + # produces warnings that variables are used only once. + # I had not found the better way to shut them up, so + # the warnings of type 'once' are disabled in this block. + if (SVN::_Core::svn_config_get_bool($conf_t, + $SVN::_Core::SVN_CONFIG_SECTION_AUTH, + $SVN::_Core::SVN_CONFIG_OPTION_STORE_PASSWORDS, + 1) == 0) { + SVN::_Core::svn_auth_set_parameter($baton, + $SVN::_Core::SVN_AUTH_PARAM_DONT_STORE_PASSWORDS, + bless (\$dont_store_passwords, "_p_void")); + } + if (SVN::_Core::svn_config_get_bool($conf_t, + $SVN::_Core::SVN_CONFIG_SECTION_AUTH, + $SVN::_Core::SVN_CONFIG_OPTION_STORE_AUTH_CREDS, + 1) == 0) { + $Git::SVN::Prompt::_no_auth_cache = 1; + } + } # no warnings 'once' + my $self = SVN::Ra->new(url => escape_url($url), auth => $baton, + config => $config, + pool => SVN::Pool->new, + auth_provider_callbacks => $callbacks); + $self->{url} = $url; + $self->{svn_path} = $url; + $self->{repos_root} = $self->get_repos_root; + $self->{svn_path} =~ s#^\Q$self->{repos_root}\E(/|$)##; + $self->{cache} = { check_path => { r => 0, data => {} }, + get_dir => { r => 0, data => {} } }; + $RA = bless $self, $class; +} + +sub check_path { + my ($self, $path, $r) = @_; + my $cache = $self->{cache}->{check_path}; + if ($r == $cache->{r} && exists $cache->{data}->{$path}) { + return $cache->{data}->{$path}; + } + my $pool = SVN::Pool->new; + my $t = $self->SUPER::check_path($path, $r, $pool); + $pool->clear; + if ($r != $cache->{r}) { + %{$cache->{data}} = (); + $cache->{r} = $r; + } + $cache->{data}->{$path} = $t; +} + +sub get_dir { + my ($self, $dir, $r) = @_; + my $cache = $self->{cache}->{get_dir}; + if ($r == $cache->{r}) { + if (my $x = $cache->{data}->{$dir}) { + return wantarray ? 
@$x : $x->[0]; + } + } + my $pool = SVN::Pool->new; + my ($d, undef, $props) = $self->SUPER::get_dir($dir, $r, $pool); + my %dirents = map { $_ => { kind => $d->{$_}->kind } } keys %$d; + $pool->clear; + if ($r != $cache->{r}) { + %{$cache->{data}} = (); + $cache->{r} = $r; + } + $cache->{data}->{$dir} = [ \%dirents, $r, $props ]; + wantarray ? (\%dirents, $r, $props) : \%dirents; +} + +sub DESTROY { + # do not call the real DESTROY since we store ourselves in $RA +} + +# get_log(paths, start, end, limit, +# discover_changed_paths, strict_node_history, receiver) +sub get_log { + my ($self, @args) = @_; + my $pool = SVN::Pool->new; + + # svn_log_changed_path_t objects passed to get_log are likely to be + # overwritten even if only the refs are copied to an external variable, + # so we should dup the structures in their entirety. Using an + # externally passed pool (instead of our temporary and quickly cleared + # pool in Git::SVN::Ra) does not help matters at all... + my $receiver = pop @args; + my $prefix = "/".$self->{svn_path}; + $prefix =~ s#/+($)##; + my $prefix_regex = qr#^\Q$prefix\E#; + push(@args, sub { + my ($paths) = $_[0]; + return &$receiver(@_) unless $paths; + $_[0] = (); + foreach my $p (keys %$paths) { + my $i = $paths->{$p}; + # Make path relative to our url, not repos_root + $p =~ s/$prefix_regex//; + my %s = map { $_ => $i->$_; } + qw/copyfrom_path copyfrom_rev action/; + if ($s{'copyfrom_path'}) { + $s{'copyfrom_path'} =~ s/$prefix_regex//; + } + $_[0]{$p} = \%s; + } + &$receiver(@_); + }); + + + # the limit parameter was not supported in SVN 1.1.x, so we + # drop it. Therefore, the receiver callback passed to it + # is made aware of this limitation by being wrapped if + # the limit passed to is being wrapped. + if ($SVN::Core::VERSION le '1.2.0') { + my $limit = splice(@args, 3, 1); + if ($limit > 0) { + my $receiver = pop @args; + push(@args, sub { &$receiver(@_) if (--$limit >= 0) }); + } + } + my $ret = $self->SUPER::get_log(@args, $pool); + $pool->clear; + $ret; +} + +sub trees_match { + my ($self, $url1, $rev1, $url2, $rev2) = @_; + my $ctx = SVN::Client->new(auth => _auth_providers); + my $out = IO::File->new_tmpfile; + + # older SVN (1.1.x) doesn't take $pool as the last parameter for + # $ctx->diff(), so we'll create a default one + my $pool = SVN::Pool->new_default_sub; + + $ra_invalid = 1; # this will open a new SVN::Ra connection to $url1 + $ctx->diff([], $url1, $rev1, $url2, $rev2, 1, 1, 0, $out, $out); + $out->flush; + my $ret = (($out->stat)[7] == 0); + close $out or croak $!; + + $ret; +} + +sub get_commit_editor { + my ($self, $log, $cb, $pool) = @_; + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : (); + $self->SUPER::get_commit_editor($log, $cb, @lock, $pool); +} + +sub gs_do_update { + my ($self, $rev_a, $rev_b, $gs, $editor) = @_; + my $new = ($rev_a == $rev_b); + my $path = $gs->{path}; + + if ($new && -e $gs->{index}) { + unlink $gs->{index} or die + "Couldn't unlink index: $gs->{index}: $!\n"; + } + my $pool = SVN::Pool->new; + $editor->set_path_strip($path); + my (@pc) = split m#/#, $path; + my $reporter = $self->do_update($rev_b, (@pc ? shift @pc : ''), + 1, $editor, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); + + # Since we can't rely on svn_ra_reparent being available, we'll + # just have to do some magic with set_path to make it so + # we only want a partial path. 
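+	# Descriptive note (added for clarity, not in upstream git-svn): the
+	# loop below reports every leading prefix of the tracked path as
+	# already being at $rev_b, and only the full path is reported at
+	# $rev_a, so the server sends a delta for just that subtree rather
+	# than for the whole repository.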
+ my $sp = ''; + my $final = join('/', @pc); + while (@pc) { + $reporter->set_path($sp, $rev_b, 0, @lock, $pool); + $sp .= '/' if length $sp; + $sp .= shift @pc; + } + die "BUG: '$sp' != '$final'\n" if ($sp ne $final); + + $reporter->set_path($sp, $rev_a, $new, @lock, $pool); + + $reporter->finish_report($pool); + $pool->clear; + $editor->{git_commit_ok}; +} + +# this requires SVN 1.4.3 or later (do_switch didn't work before 1.4.3, and +# svn_ra_reparent didn't work before 1.4) +sub gs_do_switch { + my ($self, $rev_a, $rev_b, $gs, $url_b, $editor) = @_; + my $path = $gs->{path}; + my $pool = SVN::Pool->new; + + my $full_url = $self->{url}; + my $old_url = $full_url; + $full_url .= '/' . $path if length $path; + my ($ra, $reparented); + + if ($old_url =~ m#^svn(\+ssh)?://# || + ($full_url =~ m#^https?://# && + escape_url($full_url) ne $full_url)) { + $_[0] = undef; + $self = undef; + $RA = undef; + $ra = Git::SVN::Ra->new($full_url); + $ra_invalid = 1; + } elsif ($old_url ne $full_url) { + SVN::_Ra::svn_ra_reparent($self->{session}, $full_url, $pool); + $self->{url} = $full_url; + $reparented = 1; + } + + $ra ||= $self; + $url_b = escape_url($url_b); + my $reporter = $ra->do_switch($rev_b, '', 1, $url_b, $editor, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); + $reporter->set_path('', $rev_a, 0, @lock, $pool); + $reporter->finish_report($pool); + + if ($reparented) { + SVN::_Ra::svn_ra_reparent($self->{session}, $old_url, $pool); + $self->{url} = $old_url; + } + + $pool->clear; + $editor->{git_commit_ok}; +} + +sub longest_common_path { + my ($gsv, $globs) = @_; + my %common; + my $common_max = scalar @$gsv; + + foreach my $gs (@$gsv) { + my @tmp = split m#/#, $gs->{path}; + my $p = ''; + foreach (@tmp) { + $p .= length($p) ? "/$_" : $_; + $common{$p} ||= 0; + $common{$p}++; + } + } + $globs ||= []; + $common_max += scalar @$globs; + foreach my $glob (@$globs) { + my @tmp = split m#/#, $glob->{path}->{left}; + my $p = ''; + foreach (@tmp) { + $p .= length($p) ? "/$_" : $_; + $common{$p} ||= 0; + $common{$p}++; + } + } + + my $longest_path = ''; + foreach (sort {length $b <=> length $a} keys %common) { + if ($common{$_} == $common_max) { + $longest_path = $_; + last; + } + } + $longest_path; +} + +sub gs_fetch_loop_common { + my ($self, $base, $head, $gsv, $globs) = @_; + return if ($base > $head); + my $inc = $_log_window_size; + my ($min, $max) = ($base, $head < $base + $inc ? $head : $base + $inc); + my $longest_path = longest_common_path($gsv, $globs); + my $ra_url = $self->{url}; + my $find_trailing_edge; + while (1) { + my %revs; + my $err; + my $err_handler = $SVN::Error::handler; + $SVN::Error::handler = sub { + ($err) = @_; + skip_unknown_revs($err); + }; + sub _cb { + my ($paths, $r, $author, $date, $log) = @_; + [ $paths, + { author => $author, date => $date, log => $log } ]; + } + $self->get_log([$longest_path], $min, $max, 0, 1, 1, + sub { $revs{$_[1]} = _cb(@_) }); + if ($err) { + print "Checked through r$max\r"; + } else { + $find_trailing_edge = 1; + } + if ($err and $find_trailing_edge) { + print STDERR "Path '$longest_path' ", + "was probably deleted:\n", + $err->expanded_message, + "\nWill attempt to follow ", + "revisions r$min .. r$max ", + "committed before the deletion\n"; + my $hi = $max; + while (--$hi >= $min) { + my $ok; + $self->get_log([$longest_path], $min, $hi, + 0, 1, 1, sub { + $ok = $_[1]; + $revs{$_[1]} = _cb(@_) }); + if ($ok) { + print STDERR "r$min .. 
r$ok OK\n"; + last; + } + } + $find_trailing_edge = 0; + } + $SVN::Error::handler = $err_handler; + + my %exists = map { $_->{path} => $_ } @$gsv; + foreach my $r (sort {$a <=> $b} keys %revs) { + my ($paths, $logged) = @{$revs{$r}}; + + foreach my $gs ($self->match_globs(\%exists, $paths, + $globs, $r)) { + if ($gs->rev_map_max >= $r) { + next; + } + next unless $gs->match_paths($paths, $r); + $gs->{logged_rev_props} = $logged; + if (my $last_commit = $gs->last_commit) { + $gs->assert_index_clean($last_commit); + } + my $log_entry = $gs->do_fetch($paths, $r); + if ($log_entry) { + $gs->do_git_commit($log_entry); + } + $INDEX_FILES{$gs->{index}} = 1; + } + foreach my $g (@$globs) { + my $k = "svn-remote.$g->{remote}." . + "$g->{t}-maxRev"; + Git::SVN::tmp_config($k, $r); + } + if ($ra_invalid) { + $_[0] = undef; + $self = undef; + $RA = undef; + $self = Git::SVN::Ra->new($ra_url); + $ra_invalid = undef; + } + } + # pre-fill the .rev_db since it'll eventually get filled in + # with '0' x40 if something new gets committed + foreach my $gs (@$gsv) { + next if $gs->rev_map_max >= $max; + next if defined $gs->rev_map_get($max); + $gs->rev_map_set($max, 0 x40); + } + foreach my $g (@$globs) { + my $k = "svn-remote.$g->{remote}.$g->{t}-maxRev"; + Git::SVN::tmp_config($k, $max); + } + last if $max >= $head; + $min = $max + 1; + $max += $inc; + $max = $head if ($max > $head); + } + Git::SVN::gc(); +} + +sub get_dir_globbed { + my ($self, $left, $depth, $r) = @_; + + my @x = eval { $self->get_dir($left, $r) }; + return unless scalar @x == 3; + my $dirents = $x[0]; + my @finalents; + foreach my $de (keys %$dirents) { + next if $dirents->{$de}->{kind} != $SVN::Node::dir; + if ($depth > 1) { + my @args = ("$left/$de", $depth - 1, $r); + foreach my $dir ($self->get_dir_globbed(@args)) { + push @finalents, "$de/$dir"; + } + } else { + push @finalents, $de; + } + } + @finalents; +} + +sub match_globs { + my ($self, $exists, $paths, $globs, $r) = @_; + + sub get_dir_check { + my ($self, $exists, $g, $r) = @_; + + my @dirs = $self->get_dir_globbed($g->{path}->{left}, + $g->{path}->{depth}, + $r); + + foreach my $de (@dirs) { + my $p = $g->{path}->full_path($de); + next if $exists->{$p}; + next if (length $g->{path}->{right} && + ($self->check_path($p, $r) != + $SVN::Node::dir)); + next unless $p =~ /$g->{path}->{regex}/; + $exists->{$p} = Git::SVN->init($self->{url}, $p, undef, + $g->{ref}->full_path($de), 1); + } + } + foreach my $g (@$globs) { + if (my $path = $paths->{"/$g->{path}->{left}"}) { + if ($path->{action} =~ /^[AR]$/) { + get_dir_check($self, $exists, $g, $r); + } + } + foreach (keys %$paths) { + if (/$g->{path}->{left_regex}/ && + !/$g->{path}->{regex}/) { + next if $paths->{$_}->{action} !~ /^[AR]$/; + get_dir_check($self, $exists, $g, $r); + } + next unless /$g->{path}->{regex}/; + my $p = $1; + my $pathname = $g->{path}->full_path($p); + next if $exists->{$pathname}; + next if ($self->check_path($pathname, $r) != + $SVN::Node::dir); + $exists->{$pathname} = Git::SVN->init( + $self->{url}, $pathname, undef, + $g->{ref}->full_path($p), 1); + } + my $c = ''; + foreach (split m#/#, $g->{path}->{left}) { + $c .= "/$_"; + next unless ($paths->{$c} && + ($paths->{$c}->{action} =~ /^[AR]$/)); + get_dir_check($self, $exists, $g, $r); + } + } + values %$exists; +} + +sub minimize_url { + my ($self) = @_; + return $self->{url} if ($self->{url} eq $self->{repos_root}); + my $url = $self->{repos_root}; + my @components = split(m!/!, $self->{svn_path}); + my $c = ''; + do { + $url .= "/$c" if length $c; 
+ eval { + my $ra = (ref $self)->new($url); + my $latest = $ra->get_latest_revnum; + $ra->get_log("", $latest, 0, 1, 0, 1, sub {}); + }; + } while ($@ && ($c = shift @components)); + $url; +} + +sub can_do_switch { + my $self = shift; + unless (defined $can_do_switch) { + my $pool = SVN::Pool->new; + my $rep = eval { + $self->do_switch(1, '', 0, $self->{url}, + SVN::Delta::Editor->new, $pool); + }; + if ($@) { + $can_do_switch = 0; + } else { + $rep->abort_report($pool); + $can_do_switch = 1; + } + $pool->clear; + } + $can_do_switch; +} + +sub skip_unknown_revs { + my ($err) = @_; + my $errno = $err->apr_err(); + # Maybe the branch we're tracking didn't + # exist when the repo started, so it's + # not an error if it doesn't, just continue + # + # Wonderfully consistent library, eh? + # 160013 - svn:// and file:// + # 175002 - http(s):// + # 175007 - http(s):// (this repo required authorization, too...) + # More codes may be discovered later... + if ($errno == 175007 || $errno == 175002 || $errno == 160013) { + my $err_key = $err->expanded_message; + # revision numbers change every time, filter them out + $err_key =~ s/\d+/\0/g; + $err_key = "$errno\0$err_key"; + unless ($ignored_err{$err_key}) { + warn "W: Ignoring error from SVN, path probably ", + "does not exist: ($errno): ", + $err->expanded_message,"\n"; + warn "W: Do not be alarmed at the above message ", + "git-svn is just searching aggressively for ", + "old history.\n", + "This may take a while on large repositories\n"; + $ignored_err{$err_key} = 1; + } + return; + } + die "Error from SVN, ($errno): ", $err->expanded_message,"\n"; +} + +package Git::SVN::Log; +use strict; +use warnings; +use POSIX qw/strftime/; +use Time::Local; +use constant commit_log_separator => ('-' x 72) . "\n"; +use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline + %rusers $show_commit $incremental/; +my $l_fmt; + +sub cmt_showable { + my ($c) = @_; + return 1 if defined $c->{r}; + + # big commit message got truncated by the 16k pretty buffer in rev-list + if ($c->{l} && $c->{l}->[-1] eq "...\n" && + $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) { + @{$c->{l}} = (); + my @log = command(qw/cat-file commit/, $c->{c}); + + # shift off the headers + shift @log while ($log[0] ne ''); + shift @log; + + # TODO: make $c->{l} not have a trailing newline in the future + @{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log; + + (undef, $c->{r}, undef) = ::extract_metadata( + (grep(/^git-svn-id: /, @log))[-1]); + } + return defined $c->{r}; +} + +sub log_use_color { + return $color || Git->repository->get_colorbool('color.diff'); +} + +sub git_svn_log_cmd { + my ($r_min, $r_max, @args) = @_; + my $head = 'HEAD'; + my (@files, @log_opts); + foreach my $x (@args) { + if ($x eq '--' || @files) { + push @files, $x; + } else { + if (::verify_ref("$x^0")) { + $head = $x; + } else { + push @log_opts, $x; + } + } + } + + my ($url, $rev, $uuid, $gs) = ::working_head_info($head); + $gs ||= Git::SVN->_new; + my @cmd = (qw/log --abbrev-commit --pretty=raw --default/, + $gs->refname); + push @cmd, '-r' unless $non_recursive; + push @cmd, qw/--raw --name-status/ if $verbose; + push @cmd, '--color' if log_use_color(); + push @cmd, @log_opts; + if (defined $r_max && $r_max == $r_min) { + push @cmd, '--max-count=1'; + if (my $c = $gs->rev_map_get($r_max)) { + push @cmd, $c; + } + } elsif (defined $r_max) { + if ($r_max < $r_min) { + ($r_min, $r_max) = ($r_max, $r_min); + } + my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min); + my (undef, $c_min) = 
$gs->find_rev_after($r_min, 1, $r_max); + # If there are no commits in the range, both $c_max and $c_min + # will be undefined. If there is at least 1 commit in the + # range, both will be defined. + return () if !defined $c_min || !defined $c_max; + if ($c_min eq $c_max) { + push @cmd, '--max-count=1', $c_min; + } else { + push @cmd, '--boundary', "$c_min..$c_max"; + } + } + return (@cmd, @files); +} + +# adapted from pager.c +sub config_pager { + if (! -t *STDOUT) { + $ENV{GIT_PAGER_IN_USE} = 'false'; + $pager = undef; + return; + } + chomp($pager = command_oneline(qw(var GIT_PAGER))); + if ($pager eq 'cat') { + $pager = undef; + } + $ENV{GIT_PAGER_IN_USE} = defined($pager); +} + +sub run_pager { + return unless defined $pager; + pipe my ($rfd, $wfd) or return; + defined(my $pid = fork) or ::fatal "Can't fork: $!"; + if (!$pid) { + open STDOUT, '>&', $wfd or + ::fatal "Can't redirect to stdout: $!"; + return; + } + open STDIN, '<&', $rfd or ::fatal "Can't redirect stdin: $!"; + $ENV{LESS} ||= 'FRSX'; + exec $pager or ::fatal "Can't run pager: $! ($pager)"; +} + +sub format_svn_date { + # some systmes don't handle or mishandle %z, so be creative. + my $t = shift || time; + my $gm = timelocal(gmtime($t)); + my $sign = qw( + + - )[ $t <=> $gm ]; + my $gmoff = sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]); + return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t)); +} + +sub parse_git_date { + my ($t, $tz) = @_; + # Date::Parse isn't in the standard Perl distro :( + if ($tz =~ s/^\+//) { + $t += tz_to_s_offset($tz); + } elsif ($tz =~ s/^\-//) { + $t -= tz_to_s_offset($tz); + } + return $t; +} + +sub set_local_timezone { + if (defined $TZ) { + $ENV{TZ} = $TZ; + } else { + delete $ENV{TZ}; + } +} + +sub tz_to_s_offset { + my ($tz) = @_; + $tz =~ s/(\d\d)$//; + return ($1 * 60) + ($tz * 3600); +} + +sub get_author_info { + my ($dest, $author, $t, $tz) = @_; + $author =~ s/(?:^\s*|\s*$)//g; + $dest->{a_raw} = $author; + my $au; + if ($::_authors) { + $au = $rusers{$author} || undef; + } + if (!$au) { + ($au) = ($author =~ /<([^>]+)\@[^>]+>$/); + } + $dest->{t} = $t; + $dest->{tz} = $tz; + $dest->{a} = $au; + $dest->{t_utc} = parse_git_date($t, $tz); +} + +sub process_commit { + my ($c, $r_min, $r_max, $defer) = @_; + if (defined $r_min && defined $r_max) { + if ($r_min == $c->{r} && $r_min == $r_max) { + show_commit($c); + return 0; + } + return 1 if $r_min == $r_max; + if ($r_min < $r_max) { + # we need to reverse the print order + return 0 if (defined $limit && --$limit < 0); + push @$defer, $c; + return 1; + } + if ($r_min != $r_max) { + return 1 if ($r_min < $c->{r}); + return 1 if ($r_max > $c->{r}); + } + } + return 0 if (defined $limit && --$limit < 0); + show_commit($c); + return 1; +} + +sub show_commit { + my $c = shift; + if ($oneline) { + my $x = "\n"; + if (my $l = $c->{l}) { + while ($l->[0] =~ /^\s*$/) { shift @$l } + $x = $l->[0]; + } + $l_fmt ||= 'A' . 
length($c->{r}); + print 'r',pack($l_fmt, $c->{r}),' | '; + print "$c->{c} | " if $show_commit; + print $x; + } else { + show_commit_normal($c); + } +} + +sub show_commit_changed_paths { + my ($c) = @_; + return unless $c->{changed}; + print "Changed paths:\n", @{$c->{changed}}; +} + +sub show_commit_normal { + my ($c) = @_; + print commit_log_separator, "r$c->{r} | "; + print "$c->{c} | " if $show_commit; + print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | '; + my $nr_line = 0; + + if (my $l = $c->{l}) { + while ($l->[$#$l] eq "\n" && $#$l > 0 + && $l->[($#$l - 1)] eq "\n") { + pop @$l; + } + $nr_line = scalar @$l; + if (!$nr_line) { + print "1 line\n\n\n"; + } else { + if ($nr_line == 1) { + $nr_line = '1 line'; + } else { + $nr_line .= ' lines'; + } + print $nr_line, "\n"; + show_commit_changed_paths($c); + print "\n"; + print $_ foreach @$l; + } + } else { + print "1 line\n"; + show_commit_changed_paths($c); + print "\n"; + + } + foreach my $x (qw/raw stat diff/) { + if ($c->{$x}) { + print "\n"; + print $_ foreach @{$c->{$x}} + } + } +} + +sub cmd_show_log { + my (@args) = @_; + my ($r_min, $r_max); + my $r_last = -1; # prevent dupes + set_local_timezone(); + if (defined $::_revision) { + if ($::_revision =~ /^(\d+):(\d+)$/) { + ($r_min, $r_max) = ($1, $2); + } elsif ($::_revision =~ /^\d+$/) { + $r_min = $r_max = $::_revision; + } else { + ::fatal "-r$::_revision is not supported, use ", + "standard 'git log' arguments instead"; + } + } + + config_pager(); + @args = git_svn_log_cmd($r_min, $r_max, @args); + if (!@args) { + print commit_log_separator unless $incremental || $oneline; + return; + } + my $log = command_output_pipe(@args); + run_pager(); + my (@k, $c, $d, $stat); + my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/; + while (<$log>) { + if (/^${esc_color}commit -?($::sha1_short)/o) { + my $cmt = $1; + if ($c && cmt_showable($c) && $c->{r} != $r_last) { + $r_last = $c->{r}; + process_commit($c, $r_min, $r_max, \@k) or + goto out; + } + $d = undef; + $c = { c => $cmt }; + } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) { + get_author_info($c, $1, $2, $3); + } elsif (/^${esc_color}(?:tree|parent|committer) /o) { + # ignore + } elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) { + push @{$c->{raw}}, $_; + } elsif (/^${esc_color}[ACRMDT]\t/) { + # we could add $SVN->{svn_path} here, but that requires + # remote access at the moment (repo_path_split)... 
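+			# Note (added for clarity, not in upstream git-svn):
+			# the substitution below turns git's "M<TAB>path"
+			# name-status output into an indented "M path" line
+			# that is later printed under "Changed paths:" by
+			# show_commit_changed_paths().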
+ s#^(${esc_color})([ACRMDT])\t#$1 $2 #o; + push @{$c->{changed}}, $_; + } elsif (/^${esc_color}diff /o) { + $d = 1; + push @{$c->{diff}}, $_; + } elsif ($d) { + push @{$c->{diff}}, $_; + } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]* + $esc_color*[\+\-]*$esc_color$/x) { + $stat = 1; + push @{$c->{stat}}, $_; + } elsif ($stat && /^ \d+ files changed, \d+ insertions/) { + push @{$c->{stat}}, $_; + $stat = undef; + } elsif (/^${esc_color} (git-svn-id:.+)$/o) { + ($c->{url}, $c->{r}, undef) = ::extract_metadata($1); + } elsif (s/^${esc_color} //o) { + push @{$c->{l}}, $_; + } + } + if ($c && defined $c->{r} && $c->{r} != $r_last) { + $r_last = $c->{r}; + process_commit($c, $r_min, $r_max, \@k); + } + if (@k) { + ($r_min, $r_max) = ($r_max, $r_min); + process_commit($_, $r_min, $r_max) foreach reverse @k; + } +out: + close $log; + print commit_log_separator unless $incremental || $oneline; +} + +sub cmd_blame { + my $path = pop; + + config_pager(); + run_pager(); + + my ($fh, $ctx, $rev); + + if ($_git_format) { + ($fh, $ctx) = command_output_pipe('blame', @_, $path); + while (my $line = <$fh>) { + if ($line =~ /^\^?([[:xdigit:]]+)\s/) { + # Uncommitted edits show up as a rev ID of + # all zeros, which we can't look up with + # cmt_metadata + if ($1 !~ /^0+$/) { + (undef, $rev, undef) = + ::cmt_metadata($1); + $rev = '0' if (!$rev); + } else { + $rev = '0'; + } + $rev = sprintf('%-10s', $rev); + $line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/; + } + print $line; + } + } else { + ($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD', + '--', $path); + my ($sha1); + my %authors; + my @buffer; + my %dsha; #distinct sha keys + + while (my $line = <$fh>) { + push @buffer, $line; + if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) { + $dsha{$1} = 1; + } + } + + my $s2r = ::cmt_sha2rev_batch([keys %dsha]); + + foreach my $line (@buffer) { + if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) { + $rev = $s2r->{$1}; + $rev = '0' if (!$rev) + } + elsif ($line =~ /^author (.*)/) { + $authors{$rev} = $1; + $authors{$rev} =~ s/\s/_/g; + } + elsif ($line =~ /^\t(.*)$/) { + printf("%6s %10s %s\n", $rev, $authors{$rev}, $1); + } + } + } + command_close_pipe($fh, $ctx); +} + +package Git::SVN::Migration; +# these version numbers do NOT correspond to actual version numbers +# of git nor git-svn. They are just relative. +# +# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD +# +# v1 layout: .git/$id/info/url, refs/remotes/$id +# +# v2 layout: .git/svn/$id/info/url, refs/remotes/$id +# +# v3 layout: .git/svn/$id, refs/remotes/$id +# - info/url may remain for backwards compatibility +# - this is what we migrate up to this layout automatically, +# - this will be used by git svn init on single branches +# v3.1 layout (auto migrated): +# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink +# for backwards compatibility +# +# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id +# - this is only created for newly multi-init-ed +# repositories. Similar in spirit to the +# --use-separate-remotes option in git-clone (now default) +# - we do not automatically migrate to this (following +# the example set by core git) +# +# v5 layout: .rev_db.$UUID => .rev_map.$UUID +# - newer, more-efficient format that uses 24-bytes per record +# with no filler space. +# - use xxd -c24 < .rev_map.$UUID to view and debug +# - This is a one-way migration, repositories updated to the +# new format will not be able to use old git-svn without +# rebuilding the .rev_db. 
Rebuilding the rev_db is not +# possible if noMetadata or useSvmProps are set; but should +# be no problem for users that use the (sensible) defaults. +use strict; +use warnings; +use Carp qw/croak/; +use File::Path qw/mkpath/; +use File::Basename qw/dirname basename/; +use vars qw/$_minimize/; + +sub migrate_from_v0 { + my $git_dir = $ENV{GIT_DIR}; + return undef unless -d $git_dir; + my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/); + my $migrated = 0; + while (<$fh>) { + chomp; + my ($id, $orig_ref) = ($_, $_); + next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#; + next unless -f "$git_dir/$id/info/url"; + my $new_ref = "refs/remotes/$id"; + if (::verify_ref("$new_ref^0")) { + print STDERR "W: $orig_ref is probably an old ", + "branch used by an ancient version of ", + "git-svn.\n", + "However, $new_ref also exists.\n", + "We will not be able ", + "to use this branch until this ", + "ambiguity is resolved.\n"; + next; + } + print STDERR "Migrating from v0 layout...\n" if !$migrated; + print STDERR "Renaming ref: $orig_ref => $new_ref\n"; + command_noisy('update-ref', $new_ref, $orig_ref); + command_noisy('update-ref', '-d', $orig_ref, $orig_ref); + $migrated++; + } + command_close_pipe($fh, $ctx); + print STDERR "Done migrating from v0 layout...\n" if $migrated; + $migrated; +} + +sub migrate_from_v1 { + my $git_dir = $ENV{GIT_DIR}; + my $migrated = 0; + return $migrated unless -d $git_dir; + my $svn_dir = "$git_dir/svn"; + + # just in case somebody used 'svn' as their $id at some point... + return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url"; + + print STDERR "Migrating from a git-svn v1 layout...\n"; + mkpath([$svn_dir]); + print STDERR "Data from a previous version of git-svn exists, but\n\t", + "$svn_dir\n\t(required for this version ", + "($::VERSION) of git-svn) does not exist.\n"; + my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/); + while (<$fh>) { + my $x = $_; + next unless $x =~ s#^refs/remotes/##; + chomp $x; + next unless -f "$git_dir/$x/info/url"; + my $u = eval { ::file_to_s("$git_dir/$x/info/url") }; + next unless $u; + my $dn = dirname("$git_dir/svn/$x"); + mkpath([$dn]) unless -d $dn; + if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID: + mkpath(["$git_dir/svn/svn"]); + print STDERR " - $git_dir/$x/info => ", + "$git_dir/svn/$x/info\n"; + rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or + croak "$!: $x"; + # don't worry too much about these, they probably + # don't exist with repos this old (save for index, + # and we can easily regenerate that) + foreach my $f (qw/unhandled.log index .rev_db/) { + rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f"; + } + } else { + print STDERR " - $git_dir/$x => $git_dir/svn/$x\n"; + rename "$git_dir/$x", "$git_dir/svn/$x" or + croak "$!: $x"; + } + $migrated++; + } + command_close_pipe($fh, $ctx); + print STDERR "Done migrating from a git-svn v1 layout\n"; + $migrated; +} + +sub read_old_urls { + my ($l_map, $pfx, $path) = @_; + my @dir; + foreach (<$path/*>) { + if (-r "$_/info/url") { + $pfx .= '/' if $pfx && $pfx !~ m!/$!; + my $ref_id = $pfx . 
basename $_;
+			my $url = ::file_to_s("$_/info/url");
+			$l_map->{$ref_id} = $url;
+		} elsif (-d $_) {
+			push @dir, $_;
+		}
+	}
+	foreach (@dir) {
+		my $x = $_;
+		$x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
+		read_old_urls($l_map, $x, $_);
+	}
+}
+
+sub migrate_from_v2 {
+	my @cfg = command(qw/config -l/);
+	return if grep /^svn-remote\..+\.url=/, @cfg;
+	my %l_map;
+	read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
+	my $migrated = 0;
+
+	foreach my $ref_id (sort keys %l_map) {
+		eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
+		if ($@) {
+			Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
+		}
+		$migrated++;
+	}
+	$migrated;
+}
+
+sub minimize_connections {
+	my $r = Git::SVN::read_all_remotes();
+	my $new_urls = {};
+	my $root_repos = {};
+	foreach my $repo_id (keys %$r) {
+		my $url = $r->{$repo_id}->{url} or next;
+		my $fetch = $r->{$repo_id}->{fetch} or next;
+		my $ra = Git::SVN::Ra->new($url);
+
+		# skip existing cases where we already connect to the root
+		if (($ra->{url} eq $ra->{repos_root}) ||
+		    ($ra->{repos_root} eq $repo_id)) {
+			$root_repos->{$ra->{url}} = $repo_id;
+			next;
+		}
+
+		my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
+		my $root_path = $ra->{url};
+		$root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
+		foreach my $path (keys %$fetch) {
+			my $ref_id = $fetch->{$path};
+			my $gs = Git::SVN->new($ref_id, $repo_id, $path);
+
+			# make sure we can read when connecting to
+			# a higher level of a repository
+			my ($last_rev, undef) = $gs->last_rev_commit;
+			if (!defined $last_rev) {
+				$last_rev = eval {
+					$root_ra->get_latest_revnum;
+				};
+				next if $@;
+			}
+			my $new = $root_path;
+			$new .= length $path ? "/$path" : '';
+			eval {
+				$root_ra->get_log([$new], $last_rev, $last_rev,
+						  0, 0, 1, sub { });
+			};
+			next if $@;
+			$new_urls->{$ra->{repos_root}}->{$new} =
+			        { ref_id => $ref_id,
+				  old_repo_id => $repo_id,
+				  old_path => $path };
+		}
+	}
+
+	my @emptied;
+	foreach my $url (keys %$new_urls) {
+		# see if we can re-use an existing [svn-remote "repo_id"]
+		# instead of creating a(n ugly) new section:
+		my $repo_id = $root_repos->{$url} || $url;
+
+		my $fetch = $new_urls->{$url};
+		foreach my $path (keys %$fetch) {
+			my $x = $fetch->{$path};
+			Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
+			my $pfx = "svn-remote.$x->{old_repo_id}";
+
+			my $old_fetch = quotemeta("$x->{old_path}:".
+			                          "$x->{ref_id}");
+			command_noisy(qw/config --unset/,
+			              "$pfx.fetch", '^'. $old_fetch . '$');
+			delete $r->{$x->{old_repo_id}}->
+			       {fetch}->{$x->{old_path}};
+			if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
+				command_noisy(qw/config --unset/,
+				              "$pfx.url");
+				push @emptied, $x->{old_repo_id}
+			}
+		}
+	}
+	if (@emptied) {
+		my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
+		print STDERR <<EOF;
+The following [svn-remote] sections in your config file ($file) are empty
+and can be safely removed:
+EOF
+		print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
+	}
+}
+
+sub migration_check {
+	migrate_from_v0();
+	migrate_from_v1();
+	migrate_from_v2();
+	minimize_connections() if $_minimize;
+}
+
+package Git::IndexInfo;
+use strict;
+use warnings;
+use Git qw/command_input_pipe command_close_pipe/;
+
+sub new {
+	my ($class) = @_;
+	my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
+	bless { gui => $gui, ctx => $ctx, nr => 0}, $class;
+}
+
+sub remove {
+	my ($self, $path) = @_;
+	if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
+		return ++$self->{nr};
+	}
+	undef;
+}
+
+sub update {
+	my ($self, $mode, $hash, $path) = @_;
+	if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
+		return ++$self->{nr};
+	}
+	undef;
+}
+
+sub DESTROY {
+	my ($self) = @_;
+	command_close_pipe($self->{gui}, $self->{ctx});
+}
+
+package Git::SVN::GlobSpec;
+use strict;
+use warnings;
+
+sub new {
+	my ($class, $glob, $pattern_ok) = @_;
+	my $re = $glob;
+	$re =~ s!/+$!!g; # no need for trailing slashes
+	my (@left, @right, @patterns);
+	my $state = "left";
+	my $die_msg = "Only one set of wildcard directories " .
+			"(e.g.
'*' or '*/*/*') is supported: '$glob'\n"; + for my $part (split(m|/|, $glob)) { + if ($part =~ /\*/ && $part ne "*") { + die "Invalid pattern in '$glob': $part\n"; + } elsif ($pattern_ok && $part =~ /[{}]/ && + $part !~ /^\{[^{}]+\}/) { + die "Invalid pattern in '$glob': $part\n"; + } + if ($part eq "*") { + die $die_msg if $state eq "right"; + $state = "pattern"; + push(@patterns, "[^/]*"); + } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) { + die $die_msg if $state eq "right"; + $state = "pattern"; + my $p = quotemeta($1); + $p =~ s/\\,/|/g; + push(@patterns, "(?:$p)"); + } else { + if ($state eq "left") { + push(@left, $part); + } else { + push(@right, $part); + $state = "right"; + } + } + } + my $depth = @patterns; + if ($depth == 0) { + die "One '*' is needed in glob: '$glob'\n"; + } + my $left = join('/', @left); + my $right = join('/', @right); + $re = join('/', @patterns); + $re = join('\/', + grep(length, quotemeta($left), "($re)", quotemeta($right))); + my $left_re = qr/^\/\Q$left\E(\/|$)/; + bless { left => $left, right => $right, left_regex => $left_re, + regex => qr/$re/, glob => $glob, depth => $depth }, $class; +} + +sub full_path { + my ($self, $path) = @_; + return (length $self->{left} ? "$self->{left}/" : '') . + $path . (length $self->{right} ? "/$self->{right}" : ''); +} + +__END__ + +Data structures: + + +$remotes = { # returned by read_all_remotes() + 'svn' => { + # svn-remote.svn.url=https://svn.musicpd.org + url => 'https://svn.musicpd.org', + # svn-remote.svn.fetch=mpd/trunk:trunk + fetch => { + 'mpd/trunk' => 'trunk', + }, + # svn-remote.svn.tags=mpd/tags/*:tags/* + tags => { + path => { + left => 'mpd/tags', + right => '', + regex => qr!mpd/tags/([^/]+)$!, + glob => 'tags/*', + }, + ref => { + left => 'tags', + right => '', + regex => qr!tags/([^/]+)$!, + glob => 'tags/*', + }, + } + } +}; + +$log_entry hashref as returned by libsvn_log_entry() +{ + log => 'whitespace-formatted log entry +', # trailing newline is preserved + revision => '8', # integer + date => '2004-02-24T17:01:44.108345Z', # commit date + author => 'committer name' +}; + + +# this is generated by generate_diff(); +@mods = array of diff-index line hashes, each element represents one line + of diff-index output + +diff-index line ($m hash) +{ + mode_a => first column of diff-index output, no leading ':', + mode_b => second column of diff-index output, + sha1_b => sha1sum of the final blob, + chg => change type [MCRADT], + file_a => original file name of a file (iff chg is 'C' or 'R') + file_b => new/current file name of a file (any chg) +} +; + +# retval of read_url_paths{,_all}(); +$l_map = { + # repository root url + 'https://svn.musicpd.org' => { + # repository path # GIT_SVN_ID + 'mpd/trunk' => 'trunk', + 'mpd/tags/0.11.5' => 'tags/0.11.5', + }, +} + +Notes: + I don't trust the each() function on unless I created %hash myself + because the internal iterator may not have started at base. 
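A note on the Git::SVN::GlobSpec code above: it splits a refspec glob such as 'mpd/tags/*' into a fixed left part, one run of wildcard components and a fixed right part, then compiles it into a regular expression (the __END__ section shows the resulting fields). Below is a rough C# sketch of the same idea, simplified to exactly one '*' component; the class and helper names are made up for illustration and are not part of the bundled script:

    using System;
    using System.Text.RegularExpressions;

    class GlobSpecSketch {

        // Splits a glob like "mpd/tags/*" into a fixed left part ("mpd/tags"),
        // a fixed right part ("") and a regex whose single group captures the
        // wildcard component, e.g. "0.11.5" in "mpd/tags/0.11.5".
        public static Regex ToRegex (string glob, out string left, out string right)
        {
            string [] parts = glob.TrimEnd ('/').Split ('/');
            int star = Array.IndexOf (parts, "*");

            if (star < 0 || Array.LastIndexOf (parts, "*") != star)
                throw new ArgumentException ("Exactly one '*' component expected: " + glob);

            left  = string.Join ("/", parts, 0, star);
            right = string.Join ("/", parts, star + 1, parts.Length - star - 1);

            string pattern = (left.Length > 0 ? Regex.Escape (left) + "/" : "") +
                "([^/]*)" + (right.Length > 0 ? "/" + Regex.Escape (right) : "") + "$";

            return new Regex (pattern);
        }
    }

    // Usage (hypothetical):
    //   string left, right;
    //   Regex re = GlobSpecSketch.ToRegex ("mpd/tags/*", out left, out right);
    //   re.Match ("mpd/tags/0.11.5").Groups [1].Value;   // "0.11.5"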
diff --git a/SparkleShare/Mac/git/libexec/git-core/git-symbolic-ref b/SparkleShare/Mac/git/libexec/git-core/git-symbolic-ref new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-symbolic-ref @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-tag b/SparkleShare/Mac/git/libexec/git-core/git-tag new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-tag @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-tar-tree b/SparkleShare/Mac/git/libexec/git-core/git-tar-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-tar-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-unpack-file b/SparkleShare/Mac/git/libexec/git-core/git-unpack-file new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-unpack-file @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-unpack-objects b/SparkleShare/Mac/git/libexec/git-core/git-unpack-objects new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-unpack-objects @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-update-index b/SparkleShare/Mac/git/libexec/git-core/git-update-index new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-update-index @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-update-ref b/SparkleShare/Mac/git/libexec/git-core/git-update-ref new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-update-ref @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-update-server-info b/SparkleShare/Mac/git/libexec/git-core/git-update-server-info new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-update-server-info @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-upload-archive b/SparkleShare/Mac/git/libexec/git-core/git-upload-archive new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-upload-archive @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-upload-pack b/SparkleShare/Mac/git/libexec/git-core/git-upload-pack new file mode 100755 index 00000000..593fb440 Binary files /dev/null and b/SparkleShare/Mac/git/libexec/git-core/git-upload-pack differ diff --git a/SparkleShare/Mac/git/libexec/git-core/git-var b/SparkleShare/Mac/git/libexec/git-core/git-var new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-var @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-verify-pack b/SparkleShare/Mac/git/libexec/git-core/git-verify-pack new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-verify-pack @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git 
a/SparkleShare/Mac/git/libexec/git-core/git-verify-tag b/SparkleShare/Mac/git/libexec/git-core/git-verify-tag new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-verify-tag @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-web--browse b/SparkleShare/Mac/git/libexec/git-core/git-web--browse new file mode 100755 index 00000000..e9de241d --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-web--browse @@ -0,0 +1,191 @@ +#!/bin/sh +# +# This program launch a web browser on the html page +# describing a git command. +# +# Copyright (c) 2007 Christian Couder +# Copyright (c) 2006 Theodore Y. Ts'o +# +# This file is heavily stolen from git-mergetool.sh, by +# Theodore Y. Ts'o (thanks) that is: +# +# Copyright (c) 2006 Theodore Y. Ts'o +# +# This file is licensed under the GPL v2, or a later version +# at the discretion of Junio C Hamano or any other official +# git maintainer. +# + +USAGE='[--browser=browser|--tool=browser] [--config=conf.var] url/file ...' + +# This must be capable of running outside of git directory, so +# the vanilla git-sh-setup should not be used. +NONGIT_OK=Yes +. git-sh-setup + +valid_custom_tool() +{ + browser_cmd="$(git config "browser.$1.cmd")" + test -n "$browser_cmd" +} + +valid_tool() { + case "$1" in + firefox | iceweasel | seamonkey | iceape | \ + chrome | google-chrome | chromium | chromium-browser |\ + konqueror | opera | w3m | elinks | links | lynx | dillo | open | start) + ;; # happy + *) + valid_custom_tool "$1" || return 1 + ;; + esac +} + +init_browser_path() { + browser_path=$(git config "browser.$1.path") + if test -z "$browser_path" && + test "$1" = chromium && + type chromium-browser >/dev/null 2>&1 + then + browser_path=chromium-browser + fi + : ${browser_path:="$1"} +} + +while test $# != 0 +do + case "$1" in + -b|--browser*|-t|--tool*) + case "$#,$1" in + *,*=*) + browser=`expr "z$1" : 'z-[^=]*=\(.*\)'` + ;; + 1,*) + usage ;; + *) + browser="$2" + shift ;; + esac + ;; + -c|--config*) + case "$#,$1" in + *,*=*) + conf=`expr "z$1" : 'z-[^=]*=\(.*\)'` + ;; + 1,*) + usage ;; + *) + conf="$2" + shift ;; + esac + ;; + --) + break + ;; + -*) + usage + ;; + *) + break + ;; + esac + shift +done + +test $# = 0 && usage + +if test -z "$browser" +then + for opt in "$conf" "web.browser" + do + test -z "$opt" && continue + browser="`git config $opt`" + test -z "$browser" || break + done + if test -n "$browser" && ! valid_tool "$browser"; then + echo >&2 "git config option $opt set to unknown browser: $browser" + echo >&2 "Resetting to default..." + unset browser + fi +fi + +if test -z "$browser" ; then + if test -n "$DISPLAY"; then + browser_candidates="firefox iceweasel google-chrome chrome chromium chromium-browser konqueror opera seamonkey iceape w3m elinks links lynx dillo" + if test "$KDE_FULL_SESSION" = "true"; then + browser_candidates="konqueror $browser_candidates" + fi + else + browser_candidates="w3m elinks links lynx" + fi + # SECURITYSESSIONID indicates an OS X GUI login session + if test -n "$SECURITYSESSIONID" \ + -o "$TERM_PROGRAM" = "Apple_Terminal" ; then + browser_candidates="open $browser_candidates" + fi + # /bin/start indicates MinGW + if test -x /bin/start; then + browser_candidates="start $browser_candidates" + fi + + for i in $browser_candidates; do + init_browser_path $i + if type "$browser_path" > /dev/null 2>&1; then + browser=$i + break + fi + done + test -z "$browser" && die "No known browser available." 
+else + valid_tool "$browser" || die "Unknown browser '$browser'." + + init_browser_path "$browser" + + if test -z "$browser_cmd" && ! type "$browser_path" > /dev/null 2>&1; then + die "The browser $browser is not available as '$browser_path'." + fi +fi + +case "$browser" in +firefox|iceweasel|seamonkey|iceape) + # Check version because firefox < 2.0 does not support "-new-tab". + vers=$(expr "$($browser_path -version)" : '.* \([0-9][0-9]*\)\..*') + NEWTAB='-new-tab' + test "$vers" -lt 2 && NEWTAB='' + "$browser_path" $NEWTAB "$@" & + ;; +google-chrome|chrome|chromium|chromium-browser) + # No need to specify newTab. It's default in chromium + eval "$browser_path" "$@" & + ;; +konqueror) + case "$(basename "$browser_path")" in + konqueror) + # It's simpler to use kfmclient to open a new tab in konqueror. + browser_path="$(echo "$browser_path" | sed -e 's/konqueror$/kfmclient/')" + type "$browser_path" > /dev/null 2>&1 || die "No '$browser_path' found." + eval "$browser_path" newTab "$@" + ;; + kfmclient) + eval "$browser_path" newTab "$@" + ;; + *) + "$browser_path" "$@" & + ;; + esac + ;; +w3m|elinks|links|lynx|open) + eval "$browser_path" "$@" + ;; +start) + exec "$browser_path" '"web-browse"' "$@" + ;; +opera|dillo) + "$browser_path" "$@" & + ;; +*) + if test -n "$browser_cmd"; then + ( eval $browser_cmd "$@" ) + fi + ;; +esac diff --git a/SparkleShare/Mac/git/libexec/git-core/git-whatchanged b/SparkleShare/Mac/git/libexec/git-core/git-whatchanged new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-whatchanged @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Mac/git/libexec/git-core/git-write-tree b/SparkleShare/Mac/git/libexec/git-core/git-write-tree new file mode 120000 index 00000000..f2cccfc2 --- /dev/null +++ b/SparkleShare/Mac/git/libexec/git-core/git-write-tree @@ -0,0 +1 @@ +../../bin/git \ No newline at end of file diff --git a/SparkleShare/Makefile.am b/SparkleShare/Makefile.am index 17e7a0b3..f4a4330d 100755 --- a/SparkleShare/Makefile.am +++ b/SparkleShare/Makefile.am @@ -23,6 +23,8 @@ SOURCES = \ SparkleEventLog.cs \ SparkleEventLogController.cs \ SparkleExtensions.cs \ + SparkleOptions.cs \ + SparkleInvite.cs \ SparklePlugin.cs \ SparkleSetup.cs \ SparkleSetupController.cs \ @@ -32,20 +34,6 @@ SOURCES = \ SparkleStatusIconController.cs \ SparkleUI.cs \ SparkleUIHelpers.cs - SparkleBubbles.cs \ - SparkleBubblesController.cs \ - SparkleController.cs \ - SparkleEntry.cs \ - SparkleEventLog.cs \ - SparkleEventLogController.cs \ - SparkleSetup.cs \ - SparkleSetupController.cs \ - SparkleSetupWindow.cs \ - SparkleSpinner.cs \ - SparkleStatusIcon.cs \ - SparkleStatusIconController.cs \ - SparkleUI.cs \ - SparkleUIHelpers.cs include $(top_srcdir)/build/build.mk diff --git a/SparkleShare/Program.cs b/SparkleShare/Program.cs index 9910ffa8..573dbd0c 100644 --- a/SparkleShare/Program.cs +++ b/SparkleShare/Program.cs @@ -25,7 +25,6 @@ using System.Text; using Mono.Unix; //using Mono.Unix.Native; using SparkleLib; -using SparkleLib.Options; namespace SparkleShare { @@ -39,10 +38,10 @@ namespace SparkleShare { // Short alias for the translations public static string _ (string s) { - return s; + return Catalog.GetString (s); } - [STAThread] + public static void Main (string [] args) { // Parse the command line options @@ -111,12 +110,12 @@ namespace SparkleShare { // Strange magic needed by SetProcessName () - [DllImport ("libc")] - private static extern int prctl (int option, byte [] arg2, IntPtr 
arg3, IntPtr arg4, IntPtr arg5); +// [DllImport ("libc")] +// private static extern int prctl (int option, byte [] arg2, IntPtr arg3, IntPtr arg4, IntPtr arg5); // Sets the Unix process name to 'sparkleshare' instead of 'mono' - private static void SetProcessName (string name) +/* private static void SetProcessName (string name) { try { if (prctl (15, Encoding.ASCII.GetBytes (name + "\0"), IntPtr.Zero, IntPtr.Zero, IntPtr.Zero) != 0) @@ -126,6 +125,6 @@ namespace SparkleShare { } catch (EntryPointNotFoundException) { Console.WriteLine ("SetProcessName: Entry point not found"); } - } + } */ } } diff --git a/SparkleShare/SparkleBubblesController.cs b/SparkleShare/SparkleBubblesController.cs index 1aae7d10..3fe424e4 100755 --- a/SparkleShare/SparkleBubblesController.cs +++ b/SparkleShare/SparkleBubblesController.cs @@ -16,6 +16,7 @@ using System; +using SparkleLib; namespace SparkleShare { @@ -24,25 +25,23 @@ namespace SparkleShare { public event ShowBubbleEventHandler ShowBubbleEvent; public delegate void ShowBubbleEventHandler (string title, string subtext, string image_path); - // Short alias for the translations - public static string _ (string s) - { - string t=Program._ (s); - return t; - } public SparkleBubblesController () { Program.Controller.ConflictNotificationRaised += delegate { - ShowBubble (_("Ouch! Mid-air collision!"), - _("Don't worry, SparkleShare made a copy of each conflicting file."), - null); + ShowBubble ("Ouch! Mid-air collision!", + "Don't worry, SparkleShare made a copy of each conflicting file.", + null); }; - Program.Controller.NotificationRaised += delegate (string user_name, string user_email, - string message, string folder_path) { - ShowBubble (user_name, message, - Program.Controller.GetAvatar (user_email, 36)); + Program.Controller.NotificationRaised += delegate (SparkleChangeSet change_set) { + ShowBubble (change_set.User.Name, FormatMessage (change_set), + Program.Controller.GetAvatar (change_set.User.Email, 36)); + }; + + Program.Controller.NoteNotificationRaised += delegate (SparkleUser user, string folder_name) { + ShowBubble (user.Name, "added a note to '" + folder_name + "'", + Program.Controller.GetAvatar (user.Email, 36)); }; } @@ -52,5 +51,47 @@ namespace SparkleShare { if (ShowBubbleEvent != null && Program.Controller.NotificationsEnabled) ShowBubbleEvent (title, subtext, image_path); } + + + private string FormatMessage (SparkleChangeSet change_set) + { + string file_name = ""; + string message = ""; + + if (change_set.Added.Count > 0) { + file_name = change_set.Added [0]; + message = String.Format ("added ‘{0}’", file_name); + } + + if (change_set.MovedFrom.Count > 0) { + file_name = change_set.MovedFrom [0]; + message = String.Format ("moved ‘{0}’", file_name); + } + + if (change_set.Edited.Count > 0) { + file_name = change_set.Edited [0]; + message = String.Format ("edited ‘{0}’", file_name); + } + + if (change_set.Deleted.Count > 0) { + file_name = change_set.Deleted [0]; + message = String.Format ("deleted ‘{0}’", file_name); + } + + int changes_count = (change_set.Added.Count + + change_set.Edited.Count + + change_set.Deleted.Count + + change_set.MovedFrom.Count) - 1; + + if (changes_count > 0) { + string msg = string.Format ("and {0} more", changes_count); + message += " " + String.Format (msg, changes_count); + + } else if (changes_count < 0) { + message += "did something magical"; + } + + return message; + } } } diff --git a/SparkleShare/SparkleController.cs b/SparkleShare/SparkleController.cs index 274322ac..647e6bec 100755 --- 
a/SparkleShare/SparkleController.cs +++ b/SparkleShare/SparkleController.cs @@ -217,5 +217,14 @@ namespace SparkleShare { process.StartInfo.Arguments = "\"" + folder + "\""; process.Start (); } + + + public override void OpenFile (string url) + { + Process process = new Process (); + process.StartInfo.FileName = "xdg-open"; + process.StartInfo.Arguments = url.Replace (" ", "%20"); + process.Start (); + } } } diff --git a/SparkleShare/SparkleControllerBase.cs b/SparkleShare/SparkleControllerBase.cs index b03ab7e9..16676e18 100644 --- a/SparkleShare/SparkleControllerBase.cs +++ b/SparkleShare/SparkleControllerBase.cs @@ -27,69 +27,70 @@ using System.Text.RegularExpressions; using System.Threading; using System.Xml; -#if __MonoCS__ using Mono.Unix; -#endif using SparkleLib; namespace SparkleShare { public abstract class SparkleControllerBase { - public List Repositories = new List (); - public string FolderSize; + public List Repositories; public readonly string SparklePath = SparkleConfig.DefaultConfig.FoldersPath; - public event OnQuitWhileSyncingEventHandler OnQuitWhileSyncing; - public delegate void OnQuitWhileSyncingEventHandler (); + public double ProgressPercentage = 0.0; + public string ProgressSpeed = ""; + + public event OnQuitWhileSyncingHandler OnQuitWhileSyncing; + public delegate void OnQuitWhileSyncingHandler (); public event FolderFetchedEventHandler FolderFetched; - public delegate void FolderFetchedEventHandler (); + public delegate void FolderFetchedEventHandler (string [] warnings); - public event FolderFetchErrorEventHandler FolderFetchError; - public delegate void FolderFetchErrorEventHandler (string remote_url); + public event FolderFetchErrorHandler FolderFetchError; + public delegate void FolderFetchErrorHandler (string remote_url); - public event FolderFetchingEventHandler FolderFetching; - public delegate void FolderFetchingEventHandler (double percentage); + public event FolderFetchingHandler FolderFetching; + public delegate void FolderFetchingHandler (double percentage); - public event FolderListChangedEventHandler FolderListChanged; - public delegate void FolderListChangedEventHandler (); + public event FolderListChangedHandler FolderListChanged; + public delegate void FolderListChangedHandler (); - public event FolderSizeChangedEventHandler FolderSizeChanged; - public delegate void FolderSizeChangedEventHandler (string folder_size); - - public event AvatarFetchedEventHandler AvatarFetched; - public delegate void AvatarFetchedEventHandler (); + public event AvatarFetchedHandler AvatarFetched; + public delegate void AvatarFetchedHandler (); - public event OnIdleEventHandler OnIdle; - public delegate void OnIdleEventHandler (); + public event OnIdleHandler OnIdle; + public delegate void OnIdleHandler (); - public event OnSyncingEventHandler OnSyncing; - public delegate void OnSyncingEventHandler (); + public event OnSyncingHandler OnSyncing; + public delegate void OnSyncingHandler (); - public event OnErrorEventHandler OnError; - public delegate void OnErrorEventHandler (); + public event OnErrorHandler OnError; + public delegate void OnErrorHandler (); - public event OnInvitationEventHandler OnInvitation; - public delegate void OnInvitationEventHandler (string server, string folder, string token); + public event OnInviteHandler OnInvite; + public delegate void OnInviteHandler (SparkleInvite invite); - public event ConflictNotificationRaisedEventHandler ConflictNotificationRaised; - public delegate void ConflictNotificationRaisedEventHandler (); + public event 
ConflictNotificationRaisedHandler ConflictNotificationRaised; + public delegate void ConflictNotificationRaisedHandler (); public event NotificationRaisedEventHandler NotificationRaised; - public delegate void NotificationRaisedEventHandler (string user_name, string user_email, - string message, string repository_path); + public delegate void NotificationRaisedEventHandler (SparkleChangeSet change_set); + + public event NoteNotificationRaisedEventHandler NoteNotificationRaised; + public delegate void NoteNotificationRaisedEventHandler (SparkleUser user, string folder_name); public abstract string PluginsPath { get; } private SparkleFetcherBase fetcher; private List failed_avatars = new List (); + private Object avatar_lock = new Object (); + // Short alias for the translations public static string _ (string s) { - return Program._ (s); + return Catalog.GetString (s); } @@ -107,8 +108,6 @@ namespace SparkleShare { if (CreateSparkleShareFolder ()) AddToBookmarks (); - FolderSize = GetFolderSize (); - if (FirstRun) SparkleConfig.DefaultConfig.SetConfigOption ("notifications", bool.TrueString); else @@ -128,35 +127,19 @@ namespace SparkleShare { if (FolderListChanged != null) FolderListChanged (); - - FolderSize = GetFolderSize (); - - if (FolderSizeChanged != null) - FolderSizeChanged (FolderSize); }; - watcher.Created += delegate (object o, FileSystemEventArgs args) { + SparkleInviteListener invite_listener = new SparkleInviteListener (1986); - // Handle invitations when the user saves an - // invitation into the SparkleShare folder - if (args.Name.EndsWith (".sparkle") && !FirstRun) { - XmlDocument xml_doc = new XmlDocument (); - xml_doc.Load (args.Name); + invite_listener.InviteReceived += delegate (SparkleInvite invite) { - string server = xml_doc.GetElementsByTagName ("server") [0].InnerText; - string folder = xml_doc.GetElementsByTagName ("folder") [0].InnerText; - string token = xml_doc.GetElementsByTagName ("token") [0].InnerText; - - // FIXME: this is broken :\ - if (OnInvitation != null) - OnInvitation (server, folder, token); - } + if (OnInvite != null && !FirstRun) + OnInvite (invite); }; - // FIXME: this should probably not called in a thread, because - // Repositories will not be valid until PopulateRepositories - // has been finished, but other functions use Repositories + invite_listener.Start (); + new Thread (new ThreadStart (PopulateRepositories)).Start (); } @@ -172,8 +155,8 @@ namespace SparkleShare { public bool AcceptInvitation (string server, string folder, string token) { // The location of the user's public key for SparkleShare - string public_key_file_path = SparkleHelpers.CombineMore (SparkleConfig.DefaultConfig.HomePath, ".ssh", - "sparkleshare." + UserEmail + ".key.pub"); + string public_key_file_path = SparkleHelpers.CombineMore (SparkleConfig.DefaultConfig.HomePath, + ".ssh", "sparkleshare." 
+ UserEmail + ".key.pub"); if (!File.Exists (public_key_file_path)) return false; @@ -355,12 +338,6 @@ namespace SparkleShare { string event_entry_html = EventEntryHTML; string event_log = ""; - event_entry_html = event_entry_html - .Replace ("Add note", _ ("Add note")) - .Replace ("Show all", _ ("Show all")); - event_log_html = event_log_html - .Replace ("Show all", _ ("Show all")); - foreach (ActivityDay activity_day in activity_days) { string event_entries = ""; @@ -542,40 +519,46 @@ namespace SparkleShare { // Opens the SparkleShare folder or an (optional) subfolder public abstract void OpenSparkleShareFolder (string subfolder); + // Opens a file with the appropriate application + public abstract void OpenFile (string url); + // Fires events for the current syncing state public void UpdateState () { + bool has_syncing_repos = false; + bool has_unsynced_repos = false; + foreach (SparkleRepoBase repo in Repositories) { if (repo.Status == SyncStatus.SyncDown || repo.Status == SyncStatus.SyncUp || repo.IsBuffering) { - if (OnSyncing != null) - OnSyncing (); - - return; + has_syncing_repos = true; } else if (repo.HasUnsyncedChanges) { - if (OnError != null) - OnError (); - - return; + has_unsynced_repos = true; } } - if (OnIdle != null) - OnIdle (); - FolderSize = GetFolderSize (); + if (has_syncing_repos) { + if (OnSyncing != null) + OnSyncing (); - if (FolderSizeChanged != null) - FolderSizeChanged (FolderSize); + } else if (has_unsynced_repos) { + if (OnError != null) + OnError (); + + } else { + if (OnIdle != null) + OnIdle (); + } } // Adds a repository to the list of repositories - private void AddRepository (List RepositoryList, string folder_path) + private void AddRepository (string folder_path) { if (folder_path.Equals (SparkleConfig.DefaultConfig.TmpPath)) return; @@ -589,30 +572,28 @@ namespace SparkleShare { SparkleRepoBase repo = new SparkleRepoGit (folder_path, SparkleBackend.DefaultBackend); repo.NewChangeSet += delegate (SparkleChangeSet change_set) { - string message = FormatMessage (change_set); if (NotificationRaised != null) - NotificationRaised (change_set.User.Name, change_set.User.Email, message, repo.LocalPath); + NotificationRaised (change_set); }; - repo.NewNote += delegate (string user_name, string user_email) { - if (NotificationRaised != null) - NotificationRaised (user_name, user_email, - "added a note to " + Path.GetFileName (repo.LocalPath), repo.LocalPath); + repo.NewNote += delegate (SparkleUser user) { + if (NoteNotificationRaised != null) + NoteNotificationRaised (user, repo.Name); + }; repo.ConflictResolved += delegate { - Program.SetUiCulture(); if (ConflictNotificationRaised != null) ConflictNotificationRaised (); }; repo.SyncStatusChanged += delegate (SyncStatus status) { -/* if (status == SyncStatus.SyncUp) { - foreach (string path in repo.UnsyncedFilePaths) - Console.WriteLine (path); + if (status == SyncStatus.Idle) { + ProgressPercentage = 0.0; + ProgressSpeed = ""; } -*/ + if (status == SyncStatus.Idle || status == SyncStatus.SyncUp || status == SyncStatus.SyncDown || @@ -622,11 +603,18 @@ namespace SparkleShare { } }; + repo.SyncProgressChanged += delegate (double percentage, string speed) { + ProgressPercentage = percentage; + ProgressSpeed = speed; + + UpdateState (); + }; + repo.ChangesDetected += delegate { UpdateState (); }; - RepositoryList.Add (repo); + Repositories.Add (repo); } @@ -653,26 +641,19 @@ namespace SparkleShare { // folders in the SparkleShare folder private void PopulateRepositories () { - List TempRepositories = new List 
(); + Repositories = new List (); foreach (string folder_name in SparkleConfig.DefaultConfig.Folders) { string folder_path = new SparkleFolder (folder_name).FullPath; if (Directory.Exists (folder_path)) - AddRepository (TempRepositories, folder_path); + AddRepository (folder_path); else SparkleConfig.DefaultConfig.RemoveFolder (folder_name); } - Repositories = TempRepositories; - if (FolderListChanged != null) FolderListChanged (); - - FolderSize = GetFolderSize (); - - if (FolderSizeChanged != null) - FolderSizeChanged (FolderSize); } @@ -704,97 +685,37 @@ namespace SparkleShare { } - private string GetFolderSize () + public string GetSize (string folder_name) { - double folder_size = CalculateFolderSize ( - new DirectoryInfo (SparkleConfig.DefaultConfig.FoldersPath)); + double folder_size = 0; + /* TODO + foreach (SparkleRepoBase repo in + Repositories.GetRange (0, Repositories.Count)) { - return FormatFolderSize (folder_size); + folder_size += repo.Size + repo.HistorySize; + } + */ + return FormatSize (folder_size); } - private string FormatMessage (SparkleChangeSet change_set) + public string GetHistorySize (string folder_name) { - string file_name = ""; - string message = ""; + double folder_size = 0; + /* TODO + foreach (SparkleRepoBase repo in + Repositories.GetRange (0, Repositories.Count)) { - if (change_set.Added.Count > 0) { - file_name = change_set.Added [0]; - message = String.Format (_("added ‘{0}’"), file_name); + folder_size += repo.Size + repo.HistorySize; } - - if (change_set.MovedFrom.Count > 0) { - file_name = change_set.MovedFrom [0]; - message = String.Format (_("moved ‘{0}’"), file_name); - } - - if (change_set.Edited.Count > 0) { - file_name = change_set.Edited [0]; - message = String.Format (_("edited ‘{0}’"), file_name); - } - - if (change_set.Deleted.Count > 0) { - file_name = change_set.Deleted [0]; - message = String.Format (_("deleted ‘{0}’"), file_name); - } - - int changes_count = (change_set.Added.Count + - change_set.Edited.Count + - change_set.Deleted.Count + - change_set.MovedFrom.Count) - 1; - - if (changes_count > 0) { -#if __MonoCS__ - string msg = Catalog.GetPluralString ("and {0} more", "and {0} more", changes_count); - message += " " + String.Format (msg, changes_count); -#else - message += " " + String.Format ("and {0} more", changes_count); -#endif - - } else if (changes_count < 0) { - message += _("did something magical"); - } - - return message; - } // TODO: move to bubbles controller - - - // Recursively gets a folder's size in bytes - private double CalculateFolderSize (DirectoryInfo parent) - { - if (!Directory.Exists (parent.ToString ())) - return 0; - - double size = 0; - - // Ignore the temporary 'rebase-apply' and '.tmp' directories. This prevents potential - // crashes when files are being queried whilst the files have already been deleted. - if (parent.Name.Equals ("rebase-apply") || - parent.Name.Equals (".tmp")) - return 0; - - try { - foreach (FileInfo file in parent.GetFiles()) { - if (!file.Exists) - return 0; - - size += file.Length; - } - - foreach (DirectoryInfo directory in parent.GetDirectories()) - size += CalculateFolderSize (directory); - - } catch (Exception) { - return 0; - } - - return size; + */ + return FormatSize (folder_size); } // Format a file size nicely with small caps. 
// Example: 1048576 becomes "1 ᴍʙ" - private string FormatFolderSize (double byte_count) + public string FormatSize (double byte_count) { if (byte_count >= 1099511627776) return String.Format ("{0:##.##} ᴛʙ", Math.Round (byte_count / 1099511627776, 1)); @@ -976,7 +897,10 @@ namespace SparkleShare { // if not empty if (buffer.Length > 255) { avatar_fetched = true; - File.WriteAllBytes (avatar_file_path, buffer); + + lock (this.avatar_lock) + File.WriteAllBytes (avatar_file_path, buffer); + SparkleHelpers.DebugInfo ("Controller", "Fetched gravatar for " + email); } @@ -1003,7 +927,7 @@ namespace SparkleShare { } - public virtual string GetAvatar (string email, int size) + public string GetAvatar (string email, int size) { string avatar_file_path = SparkleHelpers.CombineMore ( Path.GetDirectoryName (SparkleConfig.DefaultConfig.FullPath), "icons", @@ -1069,7 +993,7 @@ namespace SparkleShare { if (i > 1) target_folder_name += " (" + i + ")"; - this.fetcher.Finished += delegate { + this.fetcher.Finished += delegate (string [] warnings) { // Needed to do the moving SparkleHelpers.ClearAttributes (tmp_folder); @@ -1078,28 +1002,24 @@ namespace SparkleShare { try { Directory.Move (tmp_folder, target_folder_path); + + SparkleConfig.DefaultConfig.AddFolder (target_folder_name, this.fetcher.RemoteUrl, backend); + AddRepository (target_folder_path); + + if (FolderFetched != null) + FolderFetched (warnings); + + if (FolderListChanged != null) + FolderListChanged (); + + this.fetcher.Dispose (); + + if (Directory.Exists (tmp_path)) + Directory.Delete (tmp_path, true); + } catch (Exception e) { SparkleHelpers.DebugInfo ("Controller", "Error moving folder: " + e.Message); } - - SparkleConfig.DefaultConfig.AddFolder (target_folder_name, this.fetcher.RemoteUrl, backend); - AddRepository (Repositories, target_folder_path); - - if (FolderFetched != null) - FolderFetched (); - - FolderSize = GetFolderSize (); - - if (FolderSizeChanged != null) - FolderSizeChanged (FolderSize); - - if (FolderListChanged != null) - FolderListChanged (); - - this.fetcher.Dispose (); - - if (Directory.Exists (tmp_path)) - Directory.Delete (tmp_path, true); }; @@ -1161,29 +1081,15 @@ namespace SparkleShare { } - public virtual void Quit () + public void Quit () { foreach (SparkleRepoBase repo in Repositories) repo.Dispose (); -#if __MonoCS__ Environment.Exit (0); -#else - System.Windows.Forms.Application.Exit (); -#endif } - // Checks to see if an email address is valid - public bool IsValidEmail (string email) - { - Regex regex = new Regex (@"^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$", RegexOptions.IgnoreCase); - return regex.IsMatch (email); - } - - - - public void AddNoteToFolder (string folder_name, string revision, string note) { folder_name = folder_name.Replace ("%20", " "); @@ -1196,8 +1102,6 @@ namespace SparkleShare { } - - private string [] tango_palette = new string [] {"#eaab00", "#e37222", "#3892ab", "#33c2cb", "#19b271", "#9eab05", "#8599a8", "#9ca696", "#b88454", "#cc0033", "#8f6678", "#8c6cd0", "#796cbf", "#4060af", diff --git a/SparkleShare/SparkleEventLog.cs b/SparkleShare/SparkleEventLog.cs index 5ab9e152..ed28ba79 100755 --- a/SparkleShare/SparkleEventLog.cs +++ b/SparkleShare/SparkleEventLog.cs @@ -17,8 +17,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics; -using System.Text.RegularExpressions; using System.Threading; using Gtk; @@ -31,6 +29,7 @@ namespace SparkleShare { public SparkleEventLogController Controller = new SparkleEventLogController (); + private Label size_label; 
private HBox layout_horizontal; private ComboBox combo_box; private EventBox content_wrapper; @@ -60,58 +59,41 @@ namespace SparkleShare { DeleteEvent += Close; + this.size_label = new Label () { + Markup = "Size: " + Controller.Size + " " + + "History: " + Controller.HistorySize + }; + VBox layout_vertical = new VBox (false, 0); this.spinner = new SparkleSpinner (22); this.content_wrapper = new EventBox (); this.scrolled_window = new ScrolledWindow (); - this.web_view = new WebView () { - Editable = false - }; + this.web_view = new WebView () { + Editable = false + }; - this.web_view.HoveringOverLink += delegate (object o, WebKit.HoveringOverLinkArgs args) { - this.link_status = args.Link; - }; + this.web_view.HoveringOverLink += delegate (object o, WebKit.HoveringOverLinkArgs args) { + this.link_status = args.Link; + }; - this.web_view.NavigationRequested += delegate (object o, WebKit.NavigationRequestedArgs args) { - if (args.Request.Uri == this.link_status) { - // TODO: controller - Process process = new Process (); - process.StartInfo.FileName = "xdg-open"; - process.StartInfo.Arguments = args.Request.Uri.Replace (" ", "\\ "); // Escape space-characters - process.Start (); + this.web_view.NavigationRequested += delegate (object o, WebKit.NavigationRequestedArgs args) { + if (args.Request.Uri == this.link_status) + SparkleEventLogController.LinkClicked (args.Request.Uri); - } else { - //TODO: controller - Regex regex = new Regex (@"(.+)~(.+)~(.+)"); - Match match = regex.Match (args.Request.Uri); - - if (match.Success) { - string folder_name = match.Groups [1].Value; - string revision = match.Groups [2].Value; - string note = match.Groups [3].Value; - - Thread thread = new Thread (new ThreadStart (delegate { - Program.Controller.AddNoteToFolder (folder_name, revision, note); - })); - - thread.Start (); - } - } - - // Don't follow HREFs (as this would cause a page refresh) - if (!args.Request.Uri.Equals ("file:")) - args.RetVal = 1; - }; + // Don't follow HREFs (as this would cause a page refresh) + if (!args.Request.Uri.Equals ("file:")) + args.RetVal = 1; + }; this.scrolled_window.Add (this.web_view); this.content_wrapper.Add (this.spinner); this.spinner.Start (); - this.layout_horizontal = new HBox (true, 0); - this.layout_horizontal.PackStart (new Label (""), true, true, 0); - this.layout_horizontal.PackStart (new Label (""), true, true, 0); + this.layout_horizontal = new HBox (false, 0); + this.layout_horizontal.PackStart (this.size_label, true, true, 0); + this.layout_horizontal.PackStart (new Label (" "), false, false, 0); layout_vertical.PackStart (this.layout_horizontal, false, false, 0); layout_vertical.PackStart (CreateShortcutsBar (), false, false, 0); @@ -147,6 +129,15 @@ namespace SparkleShare { this.content_wrapper.ShowAll (); }); }; + + Controller.UpdateSizeInfoEvent += delegate (string size, string history_size) { + Application.Invoke (delegate { + this.size_label.Markup = "Size: " + size + " " + + "History: " + history_size; + + this.size_label.ShowAll (); + }); + }; } @@ -166,7 +157,7 @@ namespace SparkleShare { ListStore store = new ListStore (typeof (string)); - store.AppendValues (_("All Folders")); + store.AppendValues (_("All Projects")); store.AppendValues ("---"); foreach (string folder in folders) diff --git a/SparkleShare/SparkleEventLogController.cs b/SparkleShare/SparkleEventLogController.cs index 98d95ded..9e9cb229 100755 --- a/SparkleShare/SparkleEventLogController.cs +++ b/SparkleShare/SparkleEventLogController.cs @@ -18,6 +18,8 @@ using System; using 
System.Collections.Generic; using System.Diagnostics; +using System.IO; +using System.Text.RegularExpressions; using System.Threading; using SparkleLib; @@ -32,9 +34,14 @@ namespace SparkleShare { public event UpdateChooserEventHandler UpdateChooserEvent; public delegate void UpdateChooserEventHandler (string [] folders); + public event UpdateSizeInfoEventHandler UpdateSizeInfoEvent; + public delegate void UpdateSizeInfoEventHandler (string size, string history_size); + public event ContentLoadingEventHandler ContentLoadingEvent; public delegate void ContentLoadingEventHandler (); + private string selected_folder; + public string SelectedFolder { get { @@ -61,6 +68,9 @@ namespace SparkleShare { if (UpdateContentEvent != null) UpdateContentEvent (html); + + if (UpdateSizeInfoEvent != null) + UpdateSizeInfoEvent (Size, HistorySize); })); thread.Start (); @@ -80,8 +90,35 @@ namespace SparkleShare { } } + public string Size { + get { + double size = 0; - private string selected_folder; + foreach (SparkleRepoBase repo in Program.Controller.Repositories) { + if (this.selected_folder == null) + size += repo.Size; + else if (this.selected_folder.Equals (repo.Name)) + return Program.Controller.FormatSize (repo.Size); + } + + return Program.Controller.FormatSize (size); + } + } + + public string HistorySize { + get { + double size = 0; + + foreach (SparkleRepoBase repo in Program.Controller.Repositories) { + if (this.selected_folder == null) + size += repo.HistorySize; + else if (this.selected_folder.Equals (repo.Name)) + return Program.Controller.FormatSize (repo.HistorySize); + } + + return Program.Controller.FormatSize (size); + } + } public SparkleEventLogController () @@ -94,6 +131,9 @@ namespace SparkleShare { Program.Controller.OnIdle += delegate { if (UpdateContentEvent != null) UpdateContentEvent (HTML); + + if (UpdateSizeInfoEvent != null) + UpdateSizeInfoEvent (Size, HistorySize); }; Program.Controller.FolderListChanged += delegate { @@ -108,12 +148,34 @@ namespace SparkleShare { if (UpdateContentEvent != null) UpdateContentEvent (HTML); - }; - Program.Controller.NotificationRaised += delegate { - if (UpdateContentEvent != null) - UpdateContentEvent (HTML); + if (UpdateSizeInfoEvent != null) + UpdateSizeInfoEvent (Size, HistorySize); }; } + + + public static void LinkClicked (string url) + { + if (url.StartsWith (Path.VolumeSeparatorChar.ToString ())) { + Program.Controller.OpenFile (url); + + } else { + Regex regex = new Regex (@"(.+)~(.+)~(.+)"); + Match match = regex.Match (url); + + if (match.Success) { + string folder_name = match.Groups [1].Value; + string revision = match.Groups [2].Value; + string note = match.Groups [3].Value; + + Thread thread = new Thread (new ThreadStart (delegate { + Program.Controller.AddNoteToFolder (folder_name, revision, note); + })); + + thread.Start (); + } + } + } } } diff --git a/SparkleShare/SparkleInvite.cs b/SparkleShare/SparkleInvite.cs new file mode 100644 index 00000000..40b36a97 --- /dev/null +++ b/SparkleShare/SparkleInvite.cs @@ -0,0 +1,199 @@ +// SparkleShare, a collaboration and sharing tool. +// Copyright (C) 2010 Hylke Bons (hylkebons@gmail.com) +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see (http://www.gnu.org/licenses/). + + +using System; +using System.IO; +using System.Net; +using System.Net.Sockets; +using System.Text; +using System.Xml; +using System.Threading; + +using SparkleLib; + +namespace SparkleShare { + + public class SparkleInvite { + + public readonly Uri FullAddress; + public readonly string Token; + + public string Host { + get { + return FullAddress.Host; + } + } + + public string Path { + get { + return FullAddress.AbsolutePath; + } + } + + + public SparkleInvite (string host, string path, string token) + { + if (path.StartsWith ("/")) + path = path.Substring (1); + + if (!host.EndsWith ("/")) + host = host + "/"; + + FullAddress = new Uri ("ssh://" + host + path); + Token = token; + } + + + public SparkleInvite (string xml_file_path) + { + // TODO + } + } + + + public class SparkleInviteListener { + + public event InviteReceivedHandler InviteReceived; + public delegate void InviteReceivedHandler (SparkleInvite invite); + + private Thread thread; + private TcpListener tcp_listener; + + + public SparkleInviteListener (int port) + { + this.tcp_listener = new TcpListener (IPAddress.Loopback, port); + this.thread = new Thread(new ThreadStart (Listen)); + } + + + public void Start () + { + this.thread.Start (); + } + + + private void Listen () + { + this.tcp_listener.Start (); + + while (true) + { + // Blocks until a client connects + TcpClient client = this.tcp_listener.AcceptTcpClient (); + + // Create a thread to handle communications + Thread client_thread = new Thread (HandleClient); + client_thread.Start (client); + } + } + + + private void HandleClient (object client) + { + TcpClient tcp_client = (TcpClient) client; + NetworkStream client_stream = tcp_client.GetStream (); + + byte [] message = new byte [4096]; + int bytes_read; + + while (true) + { + bytes_read = 0; + + try { + // Blocks until the client sends a message + bytes_read = client_stream.Read (message, 0, 4096); + + } catch { + Console.WriteLine ("Socket error..."); + } + + // The client has disconnected + if (bytes_read == 0) + break; + + ASCIIEncoding encoding = new ASCIIEncoding (); + string received_message = encoding.GetString (message, 0, bytes_read); + string invite_xml = ""; + + if (received_message.StartsWith (Uri.UriSchemeHttp) || + received_message.StartsWith (Uri.UriSchemeHttps)) { + + WebClient web_client = new WebClient (); + + try { + // Fetch the invite file + byte [] buffer = web_client.DownloadData (received_message); + SparkleHelpers.DebugInfo ("Invite", "Received: " + received_message); + + invite_xml = ASCIIEncoding.ASCII.GetString (buffer); + + } catch (WebException e) { + SparkleHelpers.DebugInfo ("Invite", "Failed downloading: " + + received_message + " " + e.Message); + continue; + } + + } else if (received_message.StartsWith (Uri.UriSchemeFile)) { + try { + received_message = received_message.Replace (Uri.UriSchemeFile + "://", ""); + invite_xml = File.ReadAllText (received_message); + + } catch { + SparkleHelpers.DebugInfo ("Invite", "Failed opening: " + received_message); + continue; + } + + } else { + SparkleHelpers.DebugInfo ("Invite", + "Path to invite must use either the file:// or http(s):// scheme"); 
+ + continue; + } + + XmlDocument xml_document = new XmlDocument (); + XmlNode node; + string host = ""; + string path = ""; + string token = ""; + + try { + xml_document.LoadXml (invite_xml); + + node = xml_document.SelectSingleNode ("/sparkleshare/invite/host/text()"); + if (node != null) { host = node.Value; } + + node = xml_document.SelectSingleNode ("/sparkleshare/invite/path/text()"); + if (node != null) { path = node.Value; } + + node = xml_document.SelectSingleNode ("/sparkleshare/invite/token/text()"); + if (node != null) { token = node.Value; } + + } catch (XmlException e) { + SparkleHelpers.DebugInfo ("Invite", "Not valid XML: " + received_message + " " + e.Message); + return; + } + + if (InviteReceived != null) + InviteReceived (new SparkleInvite (host, path, token)); + } + + tcp_client.Close (); + } + } +} diff --git a/SparkleShare/SparkleOptions.cs b/SparkleShare/SparkleOptions.cs new file mode 100644 index 00000000..ea321f0e --- /dev/null +++ b/SparkleShare/SparkleOptions.cs @@ -0,0 +1,1101 @@ +// +// Options.cs +// +// Authors: +// Jonathan Pryor +// +// Copyright (C) 2008 Novell (http://www.novell.com) +// +// Permission is hereby granted, free of charge, to any person obtaining +// a copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to +// permit persons to whom the Software is furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be +// included in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// + +// Compile With: +// gmcs -debug+ -r:System.Core Options.cs -o:NDesk.Options.dll +// gmcs -debug+ -d:LINQ -r:System.Core Options.cs -o:NDesk.Options.dll +// +// The LINQ version just changes the implementation of +// OptionSet.Parse(IEnumerable), and confers no semantic changes. + +// +// A Getopt::Long-inspired option parsing library for C#. +// +// NDesk.Options.OptionSet is built upon a key/value table, where the +// key is a option format string and the value is a delegate that is +// invoked when the format string is matched. +// +// Option format strings: +// Regex-like BNF Grammar: +// name: .+ +// type: [=:] +// sep: ( [^{}]+ | '{' .+ '}' )? +// aliases: ( name type sep ) ( '|' name type sep )* +// +// Each '|'-delimited name is an alias for the associated action. If the +// format string ends in a '=', it has a required value. If the format +// string ends in a ':', it has an optional value. If neither '=' or ':' +// is present, no value is supported. `=' or `:' need only be defined on one +// alias, but if they are provided on more than one they must be consistent. +// +// Each alias portion may also end with a "key/value separator", which is used +// to split option values if the option accepts > 1 value. If not specified, +// it defaults to '=' and ':'. 
If specified, it can be any character except +// '{' and '}' OR the *string* between '{' and '}'. If no separator should be +// used (i.e. the separate values should be distinct arguments), then "{}" +// should be used as the separator. +// +// Options are extracted either from the current option by looking for +// the option name followed by an '=' or ':', or is taken from the +// following option IFF: +// - The current option does not contain a '=' or a ':' +// - The current option requires a value (i.e. not a Option type of ':') +// +// The `name' used in the option format string does NOT include any leading +// option indicator, such as '-', '--', or '/'. All three of these are +// permitted/required on any named option. +// +// Option bundling is permitted so long as: +// - '-' is used to start the option group +// - all of the bundled options are a single character +// - at most one of the bundled options accepts a value, and the value +// provided starts from the next character to the end of the string. +// +// This allows specifying '-a -b -c' as '-abc', and specifying '-D name=value' +// as '-Dname=value'. +// +// Option processing is disabled by specifying "--". All options after "--" +// are returned by OptionSet.Parse() unchanged and unprocessed. +// +// Unprocessed options are returned from OptionSet.Parse(). +// +// Examples: +// int verbose = 0; +// OptionSet p = new OptionSet () +// .Add ("v", v => ++verbose) +// .Add ("name=|value=", v => Console.WriteLine (v)); +// p.Parse (new string[]{"-v", "--v", "/v", "-name=A", "/name", "B", "extra"}); +// +// The above would parse the argument string array, and would invoke the +// lambda expression three times, setting `verbose' to 3 when complete. +// It would also print out "A" and "B" to standard output. +// The returned array would contain the string "extra". 
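+//
+// A SparkleShare-flavoured sketch of the same pattern (the option names and
+// PrintVersion () below are illustrative only, not taken from Program.cs):
+//
+//  bool show_help = false;
+//  OptionSet p = new OptionSet ()
+//    .Add ("h|help", v => show_help = v != null)
+//    .Add ("v|version", v => PrintVersion ());
+//  var extra = p.Parse (args);  // unprocessed arguments end up here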
+// +// C# 3.0 collection initializers are supported and encouraged: +// var p = new OptionSet () { +// { "h|?|help", v => ShowHelp () }, +// }; +// +// System.ComponentModel.TypeConverter is also supported, allowing the use of +// custom data types in the callback type; TypeConverter.ConvertFromString() +// is used to convert the value option to an instance of the specified +// type: +// +// var p = new OptionSet () { +// { "foo=", (Foo f) => Console.WriteLine (f.ToString ()) }, +// }; +// +// Random other tidbits: +// - Boolean options (those w/o '=' or ':' in the option format string) +// are explicitly enabled if they are followed with '+', and explicitly +// disabled if they are followed with '-': +// string a = null; +// var p = new OptionSet () { +// { "a", s => a = s }, +// }; +// p.Parse (new string[]{"-a"}); // sets v != null +// p.Parse (new string[]{"-a+"}); // sets v != null +// p.Parse (new string[]{"-a-"}); // sets v == null +// + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using System.Runtime.Serialization; +using System.Security.Permissions; +using System.Text; +using System.Text.RegularExpressions; + +#if LINQ +using System.Linq; +#endif + +#if TEST +using NDesk.Options; +#endif + +#if NDESK_OPTIONS +namespace NDesk.Options +#else +namespace SparkleShare +#endif +{ + public class OptionValueCollection : IList, IList { + + List values = new List (); + OptionContext c; + + internal OptionValueCollection (OptionContext c) + { + this.c = c; + } + + #region ICollection + void ICollection.CopyTo (Array array, int index) {(values as ICollection).CopyTo (array, index);} + bool ICollection.IsSynchronized {get {return (values as ICollection).IsSynchronized;}} + object ICollection.SyncRoot {get {return (values as ICollection).SyncRoot;}} + #endregion + + #region ICollection + public void Add (string item) {values.Add (item);} + public void Clear () {values.Clear ();} + public bool Contains (string item) {return values.Contains (item);} + public void CopyTo (string[] array, int arrayIndex) {values.CopyTo (array, arrayIndex);} + public bool Remove (string item) {return values.Remove (item);} + public int Count {get {return values.Count;}} + public bool IsReadOnly {get {return false;}} + #endregion + + #region IEnumerable + IEnumerator IEnumerable.GetEnumerator () {return values.GetEnumerator ();} + #endregion + + #region IEnumerable + public IEnumerator GetEnumerator () {return values.GetEnumerator ();} + #endregion + + #region IList + int IList.Add (object value) {return (values as IList).Add (value);} + bool IList.Contains (object value) {return (values as IList).Contains (value);} + int IList.IndexOf (object value) {return (values as IList).IndexOf (value);} + void IList.Insert (int index, object value) {(values as IList).Insert (index, value);} + void IList.Remove (object value) {(values as IList).Remove (value);} + void IList.RemoveAt (int index) {(values as IList).RemoveAt (index);} + bool IList.IsFixedSize {get {return false;}} + object IList.this [int index] {get {return this [index];} set {(values as IList)[index] = value;}} + #endregion + + #region IList + public int IndexOf (string item) {return values.IndexOf (item);} + public void Insert (int index, string item) {values.Insert (index, item);} + public void RemoveAt (int index) {values.RemoveAt (index);} + + private void AssertValid (int index) + { + if (c.Option == null) + 
throw new InvalidOperationException ("OptionContext.Option is null."); + if (index >= c.Option.MaxValueCount) + throw new ArgumentOutOfRangeException ("index"); + if (c.Option.OptionValueType == OptionValueType.Required && + index >= values.Count) + throw new OptionException (string.Format ( + c.OptionSet.MessageLocalizer ("Missing required value for option '{0}'."), c.OptionName), + c.OptionName); + } + + public string this [int index] { + get { + AssertValid (index); + return index >= values.Count ? null : values [index]; + } + set { + values [index] = value; + } + } + #endregion + + public List ToList () + { + return new List (values); + } + + public string[] ToArray () + { + return values.ToArray (); + } + + public override string ToString () + { + return string.Join (", ", values.ToArray ()); + } + } + + public class OptionContext { + private Option option; + private string name; + private int index; + private OptionSet set; + private OptionValueCollection c; + + public OptionContext (OptionSet set) + { + this.set = set; + this.c = new OptionValueCollection (this); + } + + public Option Option { + get {return option;} + set {option = value;} + } + + public string OptionName { + get {return name;} + set {name = value;} + } + + public int OptionIndex { + get {return index;} + set {index = value;} + } + + public OptionSet OptionSet { + get {return set;} + } + + public OptionValueCollection OptionValues { + get {return c;} + } + } + + public enum OptionValueType { + None, + Optional, + Required, + } + + public abstract class Option { + string prototype, description; + string[] names; + OptionValueType type; + int count; + string[] separators; + + protected Option (string prototype, string description) + : this (prototype, description, 1) + { + } + + protected Option (string prototype, string description, int maxValueCount) + { + if (prototype == null) + throw new ArgumentNullException ("prototype"); + if (prototype.Length == 0) + throw new ArgumentException ("Cannot be the empty string.", "prototype"); + if (maxValueCount < 0) + throw new ArgumentOutOfRangeException ("maxValueCount"); + + this.prototype = prototype; + this.names = prototype.Split ('|'); + this.description = description; + this.count = maxValueCount; + this.type = ParsePrototype (); + + if (this.count == 0 && type != OptionValueType.None) + throw new ArgumentException ( + "Cannot provide maxValueCount of 0 for OptionValueType.Required or " + + "OptionValueType.Optional.", + "maxValueCount"); + if (this.type == OptionValueType.None && maxValueCount > 1) + throw new ArgumentException ( + string.Format ("Cannot provide maxValueCount of {0} for OptionValueType.None.", maxValueCount), + "maxValueCount"); + if (Array.IndexOf (names, "<>") >= 0 && + ((names.Length == 1 && this.type != OptionValueType.None) || + (names.Length > 1 && this.MaxValueCount > 1))) + throw new ArgumentException ( + "The default option handler '<>' cannot require values.", + "prototype"); + } + + public string Prototype {get {return prototype;}} + public string Description {get {return description;}} + public OptionValueType OptionValueType {get {return type;}} + public int MaxValueCount {get {return count;}} + + public string[] GetNames () + { + return (string[]) names.Clone (); + } + + public string[] GetValueSeparators () + { + if (separators == null) + return new string [0]; + return (string[]) separators.Clone (); + } + + protected static T Parse (string value, OptionContext c) + { + Type tt = typeof (T); + bool nullable = tt.IsValueType && 
tt.IsGenericType && + !tt.IsGenericTypeDefinition && + tt.GetGenericTypeDefinition () == typeof (Nullable<>); + Type targetType = nullable ? tt.GetGenericArguments () [0] : typeof (T); + TypeConverter conv = TypeDescriptor.GetConverter (targetType); + T t = default (T); + try { + if (value != null) + t = (T) conv.ConvertFromString (value); + } + catch (Exception e) { + throw new OptionException ( + string.Format ( + c.OptionSet.MessageLocalizer ("Could not convert string `{0}' to type {1} for option `{2}'."), + value, targetType.Name, c.OptionName), + c.OptionName, e); + } + return t; + } + + internal string[] Names {get {return names;}} + internal string[] ValueSeparators {get {return separators;}} + + static readonly char[] NameTerminator = new char[]{'=', ':'}; + + private OptionValueType ParsePrototype () + { + char type = '\0'; + List seps = new List (); + for (int i = 0; i < names.Length; ++i) { + string name = names [i]; + if (name.Length == 0) + throw new ArgumentException ("Empty option names are not supported.", "prototype"); + + int end = name.IndexOfAny (NameTerminator); + if (end == -1) + continue; + names [i] = name.Substring (0, end); + if (type == '\0' || type == name [end]) + type = name [end]; + else + throw new ArgumentException ( + string.Format ("Conflicting option types: '{0}' vs. '{1}'.", type, name [end]), + "prototype"); + AddSeparators (name, end, seps); + } + + if (type == '\0') + return OptionValueType.None; + + if (count <= 1 && seps.Count != 0) + throw new ArgumentException ( + string.Format ("Cannot provide key/value separators for Options taking {0} value(s).", count), + "prototype"); + if (count > 1) { + if (seps.Count == 0) + this.separators = new string[]{":", "="}; + else if (seps.Count == 1 && seps [0].Length == 0) + this.separators = null; + else + this.separators = seps.ToArray (); + } + + return type == '=' ? 
OptionValueType.Required : OptionValueType.Optional; + } + + private static void AddSeparators (string name, int end, ICollection seps) + { + int start = -1; + for (int i = end+1; i < name.Length; ++i) { + switch (name [i]) { + case '{': + if (start != -1) + throw new ArgumentException ( + string.Format ("Ill-formed name/value separator found in \"{0}\".", name), + "prototype"); + start = i+1; + break; + case '}': + if (start == -1) + throw new ArgumentException ( + string.Format ("Ill-formed name/value separator found in \"{0}\".", name), + "prototype"); + seps.Add (name.Substring (start, i-start)); + start = -1; + break; + default: + if (start == -1) + seps.Add (name [i].ToString ()); + break; + } + } + if (start != -1) + throw new ArgumentException ( + string.Format ("Ill-formed name/value separator found in \"{0}\".", name), + "prototype"); + } + + public void Invoke (OptionContext c) + { + OnParseComplete (c); + c.OptionName = null; + c.Option = null; + c.OptionValues.Clear (); + } + + protected abstract void OnParseComplete (OptionContext c); + + public override string ToString () + { + return Prototype; + } + } + + [Serializable] + public class OptionException : Exception { + private string option; + + public OptionException () + { + } + + public OptionException (string message, string optionName) + : base (message) + { + this.option = optionName; + } + + public OptionException (string message, string optionName, Exception innerException) + : base (message, innerException) + { + this.option = optionName; + } + + protected OptionException (SerializationInfo info, StreamingContext context) + : base (info, context) + { + this.option = info.GetString ("OptionName"); + } + + public string OptionName { + get {return this.option;} + } + + [SecurityPermission (SecurityAction.LinkDemand, SerializationFormatter = true)] + public override void GetObjectData (SerializationInfo info, StreamingContext context) + { + base.GetObjectData (info, context); + info.AddValue ("OptionName", option); + } + } + + public delegate void OptionAction (TKey key, TValue value); + + public class OptionSet : KeyedCollection + { + public OptionSet () + : this (delegate (string f) {return f;}) + { + } + + public OptionSet (Converter localizer) + { + this.localizer = localizer; + } + + Converter localizer; + + public Converter MessageLocalizer { + get {return localizer;} + } + + protected override string GetKeyForItem (Option item) + { + if (item == null) + throw new ArgumentNullException ("option"); + if (item.Names != null && item.Names.Length > 0) + return item.Names [0]; + // This should never happen, as it's invalid for Option to be + // constructed w/o any names. 
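+            // (the Option constructor rejects a null or empty prototype and
+            // ParsePrototype () rejects empty names, so reaching this throw
+            // would indicate a bug elsewhere)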
+ throw new InvalidOperationException ("Option has no names!"); + } + + [Obsolete ("Use KeyedCollection.this[string]")] + protected Option GetOptionForName (string option) + { + if (option == null) + throw new ArgumentNullException ("option"); + try { + return base [option]; + } + catch (KeyNotFoundException) { + return null; + } + } + + protected override void InsertItem (int index, Option item) + { + base.InsertItem (index, item); + AddImpl (item); + } + + protected override void RemoveItem (int index) + { + base.RemoveItem (index); + Option p = Items [index]; + // KeyedCollection.RemoveItem() handles the 0th item + for (int i = 1; i < p.Names.Length; ++i) { + Dictionary.Remove (p.Names [i]); + } + } + + protected override void SetItem (int index, Option item) + { + base.SetItem (index, item); + RemoveItem (index); + AddImpl (item); + } + + private void AddImpl (Option option) + { + if (option == null) + throw new ArgumentNullException ("option"); + List added = new List (option.Names.Length); + try { + // KeyedCollection.InsertItem/SetItem handle the 0th name. + for (int i = 1; i < option.Names.Length; ++i) { + Dictionary.Add (option.Names [i], option); + added.Add (option.Names [i]); + } + } + catch (Exception) { + foreach (string name in added) + Dictionary.Remove (name); + throw; + } + } + + public new OptionSet Add (Option option) + { + base.Add (option); + return this; + } + + sealed class ActionOption : Option { + Action action; + + public ActionOption (string prototype, string description, int count, Action action) + : base (prototype, description, count) + { + if (action == null) + throw new ArgumentNullException ("action"); + this.action = action; + } + + protected override void OnParseComplete (OptionContext c) + { + action (c.OptionValues); + } + } + + public OptionSet Add (string prototype, Action action) + { + return Add (prototype, null, action); + } + + public OptionSet Add (string prototype, string description, Action action) + { + if (action == null) + throw new ArgumentNullException ("action"); + Option p = new ActionOption (prototype, description, 1, + delegate (OptionValueCollection v) { action (v [0]); }); + base.Add (p); + return this; + } + + public OptionSet Add (string prototype, OptionAction action) + { + return Add (prototype, null, action); + } + + public OptionSet Add (string prototype, string description, OptionAction action) + { + if (action == null) + throw new ArgumentNullException ("action"); + Option p = new ActionOption (prototype, description, 2, + delegate (OptionValueCollection v) {action (v [0], v [1]);}); + base.Add (p); + return this; + } + + sealed class ActionOption : Option { + Action action; + + public ActionOption (string prototype, string description, Action action) + : base (prototype, description, 1) + { + if (action == null) + throw new ArgumentNullException ("action"); + this.action = action; + } + + protected override void OnParseComplete (OptionContext c) + { + action (Parse (c.OptionValues [0], c)); + } + } + + sealed class ActionOption : Option { + OptionAction action; + + public ActionOption (string prototype, string description, OptionAction action) + : base (prototype, description, 2) + { + if (action == null) + throw new ArgumentNullException ("action"); + this.action = action; + } + + protected override void OnParseComplete (OptionContext c) + { + action ( + Parse (c.OptionValues [0], c), + Parse (c.OptionValues [1], c)); + } + } + + public OptionSet Add (string prototype, Action action) + { + return Add (prototype, null, 
action); + } + + public OptionSet Add (string prototype, string description, Action action) + { + return Add (new ActionOption (prototype, description, action)); + } + + public OptionSet Add (string prototype, OptionAction action) + { + return Add (prototype, null, action); + } + + public OptionSet Add (string prototype, string description, OptionAction action) + { + return Add (new ActionOption (prototype, description, action)); + } + + protected virtual OptionContext CreateOptionContext () + { + return new OptionContext (this); + } + +#if LINQ + public List Parse (IEnumerable arguments) + { + bool process = true; + OptionContext c = CreateOptionContext (); + c.OptionIndex = -1; + var def = GetOptionForName ("<>"); + var unprocessed = + from argument in arguments + where ++c.OptionIndex >= 0 && (process || def != null) + ? process + ? argument == "--" + ? (process = false) + : !Parse (argument, c) + ? def != null + ? Unprocessed (null, def, c, argument) + : true + : false + : def != null + ? Unprocessed (null, def, c, argument) + : true + : true + select argument; + List r = unprocessed.ToList (); + if (c.Option != null) + c.Option.Invoke (c); + return r; + } +#else + public List Parse (IEnumerable arguments) + { + OptionContext c = CreateOptionContext (); + c.OptionIndex = -1; + bool process = true; + List unprocessed = new List (); + Option def = Contains ("<>") ? this ["<>"] : null; + foreach (string argument in arguments) { + ++c.OptionIndex; + if (argument == "--") { + process = false; + continue; + } + if (!process) { + Unprocessed (unprocessed, def, c, argument); + continue; + } + if (!Parse (argument, c)) + Unprocessed (unprocessed, def, c, argument); + } + if (c.Option != null) + c.Option.Invoke (c); + return unprocessed; + } +#endif + + private static bool Unprocessed (ICollection extra, Option def, OptionContext c, string argument) + { + if (def == null) { + extra.Add (argument); + return false; + } + c.OptionValues.Add (argument); + c.Option = def; + c.Option.Invoke (c); + return false; + } + + private readonly Regex ValueOption = new Regex ( + @"^(?--|-|/)(?[^:=]+)((?[:=])(?.*))?$"); + + protected bool GetOptionParts (string argument, out string flag, out string name, out string sep, out string value) + { + if (argument == null) + throw new ArgumentNullException ("argument"); + + flag = name = sep = value = null; + Match m = ValueOption.Match (argument); + if (!m.Success) { + return false; + } + flag = m.Groups ["flag"].Value; + name = m.Groups ["name"].Value; + if (m.Groups ["sep"].Success && m.Groups ["value"].Success) { + sep = m.Groups ["sep"].Value; + value = m.Groups ["value"].Value; + } + return true; + } + + protected virtual bool Parse (string argument, OptionContext c) + { + if (c.Option != null) { + ParseValue (argument, c); + return true; + } + + string f, n, s, v; + if (!GetOptionParts (argument, out f, out n, out s, out v)) + return false; + + Option p; + if (Contains (n)) { + p = this [n]; + c.OptionName = f + n; + c.Option = p; + switch (p.OptionValueType) { + case OptionValueType.None: + c.OptionValues.Add (n); + c.Option.Invoke (c); + break; + case OptionValueType.Optional: + case OptionValueType.Required: + ParseValue (v, c); + break; + } + return true; + } + // no match; is it a bool option? + if (ParseBool (argument, n, c)) + return true; + // is it a bundled option? 
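+            // e.g. "-abc" is handled as "-a -b -c" when every letter is a
+            // registered flag; if one of the bundled options takes a value,
+            // the rest of the argument is consumed as that value ("-cfNAME").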
+ if (ParseBundledValue (f, string.Concat (n + s + v), c)) + return true; + + return false; + } + + private void ParseValue (string option, OptionContext c) + { + if (option != null) + foreach (string o in c.Option.ValueSeparators != null + ? option.Split (c.Option.ValueSeparators, StringSplitOptions.None) + : new string[]{option}) { + c.OptionValues.Add (o); + } + if (c.OptionValues.Count == c.Option.MaxValueCount || + c.Option.OptionValueType == OptionValueType.Optional) + c.Option.Invoke (c); + else if (c.OptionValues.Count > c.Option.MaxValueCount) { + throw new OptionException (localizer (string.Format ( + "Error: Found {0} option values when expecting {1}.", + c.OptionValues.Count, c.Option.MaxValueCount)), + c.OptionName); + } + } + + private bool ParseBool (string option, string n, OptionContext c) + { + Option p; + string rn; + if (n.Length >= 1 && (n [n.Length-1] == '+' || n [n.Length-1] == '-') && + Contains ((rn = n.Substring (0, n.Length-1)))) { + p = this [rn]; + string v = n [n.Length-1] == '+' ? option : null; + c.OptionName = option; + c.Option = p; + c.OptionValues.Add (v); + p.Invoke (c); + return true; + } + return false; + } + + private bool ParseBundledValue (string f, string n, OptionContext c) + { + if (f != "-") + return false; + for (int i = 0; i < n.Length; ++i) { + Option p; + string opt = f + n [i].ToString (); + string rn = n [i].ToString (); + if (!Contains (rn)) { + if (i == 0) + return false; + throw new OptionException (string.Format (localizer ( + "Cannot bundle unregistered option '{0}'."), opt), opt); + } + p = this [rn]; + switch (p.OptionValueType) { + case OptionValueType.None: + Invoke (c, opt, n, p); + break; + case OptionValueType.Optional: + case OptionValueType.Required: { + string v = n.Substring (i+1); + c.Option = p; + c.OptionName = opt; + ParseValue (v.Length != 0 ? v : null, c); + return true; + } + default: + throw new InvalidOperationException ("Unknown OptionValueType: " + p.OptionValueType); + } + } + return true; + } + + private static void Invoke (OptionContext c, string name, string value, Option option) + { + c.OptionName = name; + c.Option = option; + c.OptionValues.Add (value); + option.Invoke (c); + } + + private const int OptionWidth = 29; + + public void WriteOptionDescriptions (TextWriter o) + { + foreach (Option p in this) { + int written = 0; + if (!WriteOptionPrototype (o, p, ref written)) + continue; + + if (written < OptionWidth) + o.Write (new string (' ', OptionWidth - written)); + else { + o.WriteLine (); + o.Write (new string (' ', OptionWidth)); + } + + bool indent = false; + string prefix = new string (' ', OptionWidth+2); + foreach (string line in GetLines (localizer (GetDescription (p.Description)))) { + if (indent) + o.Write (prefix); + o.WriteLine (line); + indent = true; + } + } + } + + bool WriteOptionPrototype (TextWriter o, Option p, ref int written) + { + string[] names = p.Names; + + int i = GetNextOptionIndex (names, 0); + if (i == names.Length) + return false; + + if (names [i].Length == 1) { + Write (o, ref written, " -"); + Write (o, ref written, names [0]); + } + else { + Write (o, ref written, " --"); + Write (o, ref written, names [0]); + } + + for ( i = GetNextOptionIndex (names, i+1); + i < names.Length; i = GetNextOptionIndex (names, i+1)) { + Write (o, ref written, ", "); + Write (o, ref written, names [i].Length == 1 ? 
"-" : "--"); + Write (o, ref written, names [i]); + } + + if (p.OptionValueType == OptionValueType.Optional || + p.OptionValueType == OptionValueType.Required) { + if (p.OptionValueType == OptionValueType.Optional) { + Write (o, ref written, localizer ("[")); + } + Write (o, ref written, localizer ("=" + GetArgumentName (0, p.MaxValueCount, p.Description))); + string sep = p.ValueSeparators != null && p.ValueSeparators.Length > 0 + ? p.ValueSeparators [0] + : " "; + for (int c = 1; c < p.MaxValueCount; ++c) { + Write (o, ref written, localizer (sep + GetArgumentName (c, p.MaxValueCount, p.Description))); + } + if (p.OptionValueType == OptionValueType.Optional) { + Write (o, ref written, localizer ("]")); + } + } + return true; + } + + static int GetNextOptionIndex (string[] names, int i) + { + while (i < names.Length && names [i] == "<>") { + ++i; + } + return i; + } + + static void Write (TextWriter o, ref int n, string s) + { + n += s.Length; + o.Write (s); + } + + private static string GetArgumentName (int index, int maxIndex, string description) + { + if (description == null) + return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1); + string[] nameStart; + if (maxIndex == 1) + nameStart = new string[]{"{0:", "{"}; + else + nameStart = new string[]{"{" + index + ":"}; + for (int i = 0; i < nameStart.Length; ++i) { + int start, j = 0; + do { + start = description.IndexOf (nameStart [i], j); + } while (start >= 0 && j != 0 ? description [j++ - 1] == '{' : false); + if (start == -1) + continue; + int end = description.IndexOf ("}", start); + if (end == -1) + continue; + return description.Substring (start + nameStart [i].Length, end - start - nameStart [i].Length); + } + return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1); + } + + private static string GetDescription (string description) + { + if (description == null) + return string.Empty; + StringBuilder sb = new StringBuilder (description.Length); + int start = -1; + for (int i = 0; i < description.Length; ++i) { + switch (description [i]) { + case '{': + if (i == start) { + sb.Append ('{'); + start = -1; + } + else if (start < 0) + start = i + 1; + break; + case '}': + if (start < 0) { + if ((i+1) == description.Length || description [i+1] != '}') + throw new InvalidOperationException ("Invalid option description: " + description); + ++i; + sb.Append ("}"); + } + else { + sb.Append (description.Substring (start, i - start)); + start = -1; + } + break; + case ':': + if (start < 0) + goto default; + start = i + 1; + break; + default: + if (start < 0) + sb.Append (description [i]); + break; + } + } + return sb.ToString (); + } + + private static IEnumerable GetLines (string description) + { + if (string.IsNullOrEmpty (description)) { + yield return string.Empty; + yield break; + } + int length = 80 - OptionWidth - 1; + int start = 0, end; + do { + end = GetLineEnd (start, length, description); + char c = description [end-1]; + if (char.IsWhiteSpace (c)) + --end; + bool writeContinuation = end != description.Length && !IsEolChar (c); + string line = description.Substring (start, end - start) + + (writeContinuation ? 
"-" : ""); + yield return line; + start = end; + if (char.IsWhiteSpace (c)) + ++start; + length = 80 - OptionWidth - 2 - 1; + } while (end < description.Length); + } + + private static bool IsEolChar (char c) + { + return !char.IsLetterOrDigit (c); + } + + private static int GetLineEnd (int start, int length, string description) + { + int end = System.Math.Min (start + length, description.Length); + int sep = -1; + for (int i = start; i < end; ++i) { + if (description [i] == '\n') + return i+1; + if (IsEolChar (description [i])) + sep = i+1; + } + if (sep == -1 || end == description.Length) + return end; + return sep; + } + } +} + diff --git a/SparkleShare/SparkleSetup.cs b/SparkleShare/SparkleSetup.cs index 84a7ffd8..1a314f88 100755 --- a/SparkleShare/SparkleSetup.cs +++ b/SparkleShare/SparkleSetup.cs @@ -34,15 +34,6 @@ namespace SparkleShare { private string SecondaryTextColor; private string SecondaryTextColorSelected; - private Entry NameEntry; - private Entry EmailEntry; - private SparkleEntry AddressEntry; - private SparkleEntry PathEntry; - - private Button NextButton; - private Button SyncButton; - - private Table Table; private ProgressBar progress_bar = new ProgressBar (); @@ -65,7 +56,7 @@ namespace SparkleShare { ) ); - Controller.ChangePageEvent += delegate (PageType type) { + Controller.ChangePageEvent += delegate (PageType type, string [] warnings) { Application.Invoke (delegate { Reset (); @@ -73,53 +64,71 @@ namespace SparkleShare { case PageType.Setup: { Header = _("Welcome to SparkleShare!"); - Description = _("Before we can create a SparkleShare folder on this " + - "computer, we need a few bits of information from you."); + Description = "We'll need some info to mark your changes in the event log. " + + "Don't worry, this stays between you and your peers."; - Table = new Table (4, 2, true) { - RowSpacing = 6 + + Table table = new Table (2, 3, true) { + RowSpacing = 6, + ColumnSpacing = 6 }; Label name_label = new Label ("" + _("Full Name:") + "") { UseMarkup = true, - Xalign = 0 + Xalign = 1 }; - NameEntry = new Entry (Controller.GuessedUserName); - NameEntry.Changed += delegate { - CheckSetupPage (); + Entry name_entry = new Entry (Controller.GuessedUserName) { + Xalign = 0 }; - EmailEntry = new Entry (Controller.GuessedUserEmail); - EmailEntry.Changed += delegate { - CheckSetupPage (); + Entry email_entry = new Entry (Controller.GuessedUserEmail) { + Xalign = 0 + }; + + name_entry.Changed += delegate { + Controller.CheckSetupPage (name_entry.Text, email_entry.Text); + }; + + email_entry.Changed += delegate { + Controller.CheckSetupPage (name_entry.Text, email_entry.Text); }; Label email_label = new Label ("" + _("Email:") + "") { UseMarkup = true, - Xalign = 0 + Xalign = 1 }; - Table.Attach (name_label, 0, 1, 0, 1); - Table.Attach (NameEntry, 1, 2, 0, 1); - Table.Attach (email_label, 0, 1, 1, 2); - Table.Attach (EmailEntry, 1, 2, 1, 2); + table.Attach (name_label, 0, 1, 0, 1); + table.Attach (name_entry, 1, 2, 0, 1); + table.Attach (email_label, 0, 1, 1, 2); + table.Attach (email_entry, 1, 2, 1, 2); + + VBox wrapper = new VBox (false, 9); + wrapper.PackStart (table, true, false, 0); - NextButton = new Button (_("Next")) { + Button continue_button = new Button (_("Continue")) { Sensitive = false }; - NextButton.Clicked += delegate (object o, EventArgs args) { - string full_name = NameEntry.Text; - string email = EmailEntry.Text; + continue_button.Clicked += delegate (object o, EventArgs args) { + string full_name = name_entry.Text; + string email = email_entry.Text; 
Controller.SetupPageCompleted (full_name, email); }; - AddButton (NextButton); - Add (Table); + AddButton (continue_button); + Add (wrapper); - CheckSetupPage (); + + Controller.UpdateSetupContinueButtonEvent += delegate (bool button_enabled) { + Application.Invoke (delegate { + continue_button.Sensitive = button_enabled; + }); + }; + + Controller.CheckSetupPage (name_entry.Text, email_entry.Text); break; } @@ -133,10 +142,11 @@ namespace SparkleShare { VBox layout_address = new VBox (true, 0); VBox layout_path = new VBox (true, 0); + ListStore store = new ListStore (typeof (Gdk.Pixbuf), typeof (string), typeof (SparklePlugin)); - TreeView tree = new TreeView (store) { HeadersVisible = false }; + SparkleTreeView tree = new SparkleTreeView (store) { HeadersVisible = false }; ScrolledWindow scrolled_window = new ScrolledWindow (); scrolled_window.AddWithViewport (tree); @@ -162,8 +172,8 @@ namespace SparkleShare { tree.AppendColumn (service_column); - PathEntry = new SparkleEntry (); - AddressEntry = new SparkleEntry (); + SparkleEntry path_entry = new SparkleEntry (); + SparkleEntry address_entry = new SparkleEntry (); // Select the first plugin by default @@ -174,20 +184,20 @@ namespace SparkleShare { Controller.ChangeAddressFieldEvent += delegate (string text, string example_text, FieldState state) { - Console.WriteLine ("> " + text); + Application.Invoke (delegate { - AddressEntry.Text = text; - AddressEntry.Sensitive = (state == FieldState.Enabled); + address_entry.Text = text; + address_entry.Sensitive = (state == FieldState.Enabled); if (string.IsNullOrEmpty (example_text)) - AddressEntry.ExampleText = null; + address_entry.ExampleText = null; else - AddressEntry.ExampleText = example_text; + address_entry.ExampleText = example_text; if (string.IsNullOrEmpty (text)) - AddressEntry.ExampleTextActive = true; + address_entry.ExampleTextActive = true; else - AddressEntry.ExampleTextActive = false; + address_entry.ExampleTextActive = false; }); }; @@ -195,34 +205,24 @@ namespace SparkleShare { string example_text, FieldState state) { Application.Invoke (delegate { - PathEntry.Text = text; - PathEntry.Sensitive = (state == FieldState.Enabled); + path_entry.Text = text; + path_entry.Sensitive = (state == FieldState.Enabled); if (string.IsNullOrEmpty (example_text)) - PathEntry.ExampleText = null; + path_entry.ExampleText = null; else - PathEntry.ExampleText = example_text; + path_entry.ExampleText = example_text; if (string.IsNullOrEmpty (text)) - PathEntry.ExampleTextActive = true; + path_entry.ExampleTextActive = true; else - PathEntry.ExampleTextActive = false; + path_entry.ExampleTextActive = false; }); }; // Update the address field text when the selection changes tree.CursorChanged += delegate (object sender, EventArgs e) { - TreeIter iter; - TreeModel model; - - TreeSelection selection = (sender as TreeView).Selection; - selection.GetSelected (out model, out iter); - - // SparklePlugin plugin = (SparklePlugin) model.GetValue (iter, 2); - int selected_path = int.Parse (model.GetPath (iter).ToString ()); - - Controller.SelectedPluginChanged (selected_path); - + Controller.SelectedPluginChanged (tree.SelectedRow); // TODO: Scroll to selected row when using arrow keys }; @@ -245,10 +245,10 @@ namespace SparkleShare { SparklePlugin plugin = (SparklePlugin) model.GetValue (iter, 2); if (plugin.Address != null) { - AddressEntry.Sensitive = false;} + address_entry.Sensitive = false;} if (plugin.Path != null) - PathEntry.Sensitive = false; + path_entry.Sensitive = false; // TODO: Scroll to the 
selection @@ -258,43 +258,46 @@ namespace SparkleShare { } })); - AddressEntry.Completion = new EntryCompletion(); + address_entry.Completion = new EntryCompletion(); ListStore server_store = new ListStore (typeof (string)); foreach (string host in Program.Controller.PreviousHosts) server_store.AppendValues (host); - AddressEntry.Completion.Model = server_store; - AddressEntry.Completion.TextColumn = 0; + address_entry.Completion.Model = server_store; + address_entry.Completion.TextColumn = 0; - AddressEntry.Changed += delegate { - CheckAddPage (); + address_entry.Changed += delegate { + Controller.CheckAddPage (address_entry.Text, path_entry.Text, tree.SelectedRow); }; layout_address.PackStart (new Label () { - Markup = "" + _("Address") + "", - Xalign = 0 - }, true, true, 0); + Markup = "" + _("Address") + "", + Xalign = 0 + }, true, true, 0); - layout_address.PackStart (AddressEntry, true, true, 0); + layout_address.PackStart (address_entry, true, true, 0); - PathEntry.Completion = new EntryCompletion(); + path_entry.Completion = new EntryCompletion(); ListStore folder_store = new ListStore (typeof (string)); //foreach (string host in Program.Controller.FolderPaths) // folder_store.AppendValues (host); - PathEntry.Completion.Model = folder_store; - PathEntry.Completion.TextColumn = 0; + path_entry.Completion.Model = folder_store; + path_entry.Completion.TextColumn = 0; - PathEntry.Changed += delegate { - CheckAddPage (); + path_entry.Changed += delegate { + Controller.CheckAddPage (address_entry.Text, path_entry.Text, tree.SelectedRow); }; - layout_path.PackStart (new Label () { Markup = "" + _("Remote Path") + "", Xalign = 0 }, - true, true, 0); - layout_path.PackStart (PathEntry, true, true, 0); + layout_path.PackStart (new Label () { + Markup = "" + _("Remote Path") + "", + Xalign = 0 + }, true, true, 0); + + layout_path.PackStart (path_entry, true, true, 0); layout_fields.PackStart (layout_address); layout_fields.PackStart (layout_path); @@ -313,19 +316,19 @@ namespace SparkleShare { }; // Sync button - SyncButton = new Button (_("Add")); + Button add_button = new Button (_("Add")); - SyncButton.Clicked += delegate { - string server = AddressEntry.Text; - string folder_name = PathEntry.Text; + add_button.Clicked += delegate { + string server = address_entry.Text; + string folder_name = path_entry.Text; Controller.AddPageCompleted (server, folder_name); }; AddButton (cancel_button); - AddButton (SyncButton); + AddButton (add_button); - CheckAddPage (); + Controller.CheckAddPage (address_entry.Text, path_entry.Text, tree.SelectedRow); break; } @@ -461,7 +464,30 @@ namespace SparkleShare { Close (); }; - Add (null); + + if (warnings != null) { + Image warning_image = new Image ( + SparkleUIHelpers.GetIcon ("dialog-warning", 24) + ); + + Label warning_label = new Label (warnings [0]) { + Xalign = 0, + Wrap = true + }; + + HBox warning_layout = new HBox (false, 0); + warning_layout.PackStart (warning_image, false, false, 0); + warning_layout.PackStart (warning_label, true, true, 15); + + VBox warning_wrapper = new VBox (false, 0); + warning_wrapper.PackStart (warning_layout, false, false, 0); + + Add (warning_wrapper); + + } else { + Add (null); + } + AddButton (open_folder_button); AddButton (finish_button); @@ -575,42 +601,6 @@ namespace SparkleShare { } - // Enables or disables the 'Next' button depending on the - // entries filled in by the user - private void CheckSetupPage () - { - if (NameEntry.Text.Length > 0 && - Program.Controller.IsValidEmail (EmailEntry.Text)) { - - 
NextButton.Sensitive = true; - } else { - NextButton.Sensitive = false; - } - } - - - // Enables or disables the 'Next' button depending on the - // entries filled in by the user - public void CheckAddPage () - { - SyncButton.Sensitive = false; - - if (PathEntry.ExampleTextActive || - (AddressEntry.Sensitive && AddressEntry.ExampleTextActive)) - return; - - bool IsFolder = !PathEntry.Text.Trim ().Equals (""); - bool IsServer = !AddressEntry.Text.Trim ().Equals (""); - - if (AddressEntry.Sensitive == true) { - if (IsServer && IsFolder) - SyncButton.Sensitive = true; - } else if (IsFolder) { - SyncButton.Sensitive = true; - } - } - - private void RenderServiceColumn (TreeViewColumn column, CellRenderer cell, TreeModel model, TreeIter iter) { @@ -635,4 +625,25 @@ namespace SparkleShare { ); } } + + + public class SparkleTreeView : TreeView { + + public int SelectedRow + { + get { + TreeIter iter; + TreeModel model; + + Selection.GetSelected (out model, out iter); + + return int.Parse (model.GetPath (iter).ToString ()); + } + } + + + public SparkleTreeView (ListStore store) : base (store) + { + } + } } diff --git a/SparkleShare/SparkleSetupController.cs b/SparkleShare/SparkleSetupController.cs index 317a7c27..591eb003 100755 --- a/SparkleShare/SparkleSetupController.cs +++ b/SparkleShare/SparkleSetupController.cs @@ -18,6 +18,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Text.RegularExpressions; using SparkleLib; @@ -32,15 +33,26 @@ namespace SparkleShare { Tutorial } + public enum FieldState { + Enabled, + Disabled + } + public class SparkleSetupController { public event ChangePageEventHandler ChangePageEvent; - public delegate void ChangePageEventHandler (PageType page); + public delegate void ChangePageEventHandler (PageType page, string [] warnings); public event UpdateProgressBarEventHandler UpdateProgressBarEvent; public delegate void UpdateProgressBarEventHandler (double percentage); + public event UpdateSetupContinueButtonEventHandler UpdateSetupContinueButtonEvent; + public delegate void UpdateSetupContinueButtonEventHandler (bool button_enabled); + + public event UpdateAddProjectButtonEventHandler UpdateAddProjectButtonEvent; + public delegate void UpdateAddProjectButtonEventHandler (bool button_enabled); + public event ChangeAddressFieldEventHandler ChangeAddressFieldEvent; public delegate void ChangeAddressFieldEventHandler (string text, string example_text, FieldState state); @@ -140,7 +152,7 @@ namespace SparkleShare { SelectedPlugin = Plugins [0]; - ChangePageEvent += delegate (PageType page) { + ChangePageEvent += delegate (PageType page, string [] warning) { this.previous_page = page; }; } @@ -149,7 +161,20 @@ namespace SparkleShare { public void ShowSetupPage () { if (ChangePageEvent != null) - ChangePageEvent (PageType.Setup); + ChangePageEvent (PageType.Setup, null); + } + + + public void CheckSetupPage (string full_name, string email) + { + full_name = full_name.Trim (); + email = email.Trim (); + + bool fields_valid = full_name != null && full_name.Trim().Length > 0 && + IsValidEmail (email); + + if (UpdateSetupContinueButtonEvent != null) + UpdateSetupContinueButtonEvent (fields_valid); } @@ -162,7 +187,7 @@ namespace SparkleShare { Program.Controller.UpdateState (); if (ChangePageEvent != null) - ChangePageEvent (PageType.Tutorial); + ChangePageEvent (PageType.Tutorial, null); } @@ -171,7 +196,7 @@ namespace SparkleShare { this.tutorial_page_number++; if (ChangePageEvent != null) - ChangePageEvent (PageType.Tutorial); + 
ChangePageEvent (PageType.Tutorial, null); } @@ -180,19 +205,35 @@ namespace SparkleShare { this.tutorial_page_number = 4; if (ChangePageEvent != null) - ChangePageEvent (PageType.Tutorial); + ChangePageEvent (PageType.Tutorial, null); } public void ShowAddPage () { if (ChangePageEvent != null) - ChangePageEvent (PageType.Add); + ChangePageEvent (PageType.Add, null); SelectedPluginChanged (SelectedPluginIndex); } + public void CheckAddPage (string address, string remote_path, int selected_plugin) + { + if (SelectedPluginIndex != selected_plugin) + SelectedPluginChanged (selected_plugin); + + address = address.Trim (); + remote_path = remote_path.Trim (); + + bool fields_valid = address != null && address.Trim().Length > 0 && + remote_path != null && remote_path.Trim().Length > 0; + + if (UpdateAddProjectButtonEvent != null) + UpdateAddProjectButtonEvent (fields_valid); + } + + public void AddPageCompleted (string address, string path) { this.syncing_folder = Path.GetFileNameWithoutExtension (path); @@ -200,11 +241,13 @@ namespace SparkleShare { this.previous_path = path; if (ChangePageEvent != null) - ChangePageEvent (PageType.Syncing); + ChangePageEvent (PageType.Syncing, null); - Program.Controller.FolderFetched += delegate { + // TODO: Remove events afterwards + + Program.Controller.FolderFetched += delegate (string [] warnings) { if (ChangePageEvent != null) - ChangePageEvent (PageType.Finished); + ChangePageEvent (PageType.Finished, warnings); this.previous_address = ""; this.syncing_folder = ""; @@ -216,7 +259,7 @@ namespace SparkleShare { this.previous_url = remote_url; if (ChangePageEvent != null) - ChangePageEvent (PageType.Error); + ChangePageEvent (PageType.Error, null); this.syncing_folder = ""; }; @@ -233,7 +276,7 @@ namespace SparkleShare { public void ErrorPageCompleted () { if (ChangePageEvent != null) - ChangePageEvent (PageType.Add); + ChangePageEvent (PageType.Add, null); } @@ -242,10 +285,13 @@ namespace SparkleShare { Program.Controller.StopFetcher (); if (ChangePageEvent != null) - ChangePageEvent (PageType.Add); + ChangePageEvent (PageType.Add, null); } + // TODO: public void WindowClosed () { } + + public void FinishedPageCompleted () { this.previous_address = ""; @@ -283,11 +329,14 @@ namespace SparkleShare { ChangePathFieldEvent ("", "", FieldState.Enabled); } } - } - public enum FieldState { - Enabled, - Disabled + private bool IsValidEmail (string email) + { + Regex regex = new Regex (@"^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$", + RegexOptions.IgnoreCase); + + return regex.IsMatch (email); + } } } diff --git a/SparkleShare/SparkleShare.csproj b/SparkleShare/SparkleShare.csproj index dca42164..86ea9285 100644 --- a/SparkleShare/SparkleShare.csproj +++ b/SparkleShare/SparkleShare.csproj @@ -76,5 +76,7 @@ + + diff --git a/SparkleShare/SparkleSpinner.cs b/SparkleShare/SparkleSpinner.cs index 69f1bc23..ef4c3347 100755 --- a/SparkleShare/SparkleSpinner.cs +++ b/SparkleShare/SparkleSpinner.cs @@ -84,8 +84,7 @@ namespace SparkleShare { private void SetImage () { - if (CurrentStep < Images.Length) - Pixbuf = Images[CurrentStep]; + Pixbuf = Images [CurrentStep]; } diff --git a/SparkleShare/SparkleStatusIcon.cs b/SparkleShare/SparkleStatusIcon.cs index a7cdb770..5778f4a4 100644 --- a/SparkleShare/SparkleStatusIcon.cs +++ b/SparkleShare/SparkleStatusIcon.cs @@ -74,7 +74,7 @@ namespace SparkleShare { if (Controller.Folders.Length == 0) StateText = _("Welcome to SparkleShare!"); else - StateText = _("Up to date") + " " + Controller.FolderSize; + StateText = _("Up to date") + 
" — " + Controller.FolderSize; CreateMenu (); @@ -88,7 +88,7 @@ namespace SparkleShare { if (Controller.Folders.Length == 0) StateText = _("Welcome to SparkleShare!"); else - StateText = _("Up to date") + " " + Controller.FolderSize; + StateText = _("Up to date") + " — " + Controller.FolderSize; #if HAVE_APP_INDICATOR this.indicator.IconName = "process-syncing-sparkleshare-i"; @@ -103,8 +103,8 @@ namespace SparkleShare { case IconState.Syncing: - StateText = _("Syncing"); - UpdateStateText (); // TODO + StateText = _("Syncing…"); + UpdateStateText (); if (!Animation.Enabled) Animation.Start (); @@ -113,6 +113,8 @@ namespace SparkleShare { case IconState.Error: + Animation.Stop (); + StateText = _("Not everything is synced"); UpdateStateText (); CreateMenu (); @@ -159,8 +161,7 @@ namespace SparkleShare { else FrameNumber = 0; - string icon_name = "process-syncing-sparkleshare-"; - + string icon_name = "process-syncing-sparkleshare"; for (int i = 0; i <= FrameNumber; i++) icon_name += "i"; @@ -168,7 +169,7 @@ namespace SparkleShare { #if HAVE_APP_INDICATOR this.indicator.IconName = icon_name; #else - this.status_icon.Pixbuf = SparkleUIHelpers.GetIcon (icon_name, 24); + this.status_icon.Pixbuf = AnimationFrames [FrameNumber]; #endif }); }; @@ -235,7 +236,7 @@ namespace SparkleShare { Menu.Add (new SeparatorMenuItem ()); // Opens the wizard to add a new remote folder - MenuItem sync_item = new MenuItem (_("Add Hosted Project")); + MenuItem sync_item = new MenuItem (_("Add Hosted Project…")); if (Program.Controller.FirstRun) sync_item.Sensitive = false; @@ -260,9 +261,8 @@ namespace SparkleShare { Menu.Add (new SeparatorMenuItem ()); MenuItem recent_events_item = new MenuItem (_("Open Recent Events")); - - if (Program.Controller.Folders.Count < 1) - recent_events_item.Sensitive = false; + + recent_events_item.Sensitive = (Controller.Folders.Length > 0); recent_events_item.Activated += delegate { Application.Invoke (delegate { diff --git a/SparkleShare/SparkleStatusIconController.cs b/SparkleShare/SparkleStatusIconController.cs index 6fc54782..e8394139 100755 --- a/SparkleShare/SparkleStatusIconController.cs +++ b/SparkleShare/SparkleStatusIconController.cs @@ -18,10 +18,14 @@ using System; using System.IO; +using SparkleLib; + namespace SparkleShare { public enum IconState { Idle, + SyncingUp, + SyncingDown, Syncing, Error } @@ -44,22 +48,37 @@ namespace SparkleShare { public string FolderSize { get { - return Program.Controller.FolderSize; + double size = 0; + + foreach (SparkleRepoBase repo in Program.Controller.Repositories) + size += repo.Size + repo.HistorySize; + + return Program.Controller.FormatSize (size); } } + + public int ProgressPercentage { + get { + return (int) Program.Controller.ProgressPercentage; + } + } + + public string ProgressSpeed { + get { + return Program.Controller.ProgressSpeed; + } + } + + public SparkleStatusIconController () { - Program.Controller.FolderSizeChanged += delegate { - if (UpdateMenuEvent != null) - UpdateMenuEvent (CurrentState); - }; - Program.Controller.FolderListChanged += delegate { if (UpdateMenuEvent != null) UpdateMenuEvent (CurrentState); }; + Program.Controller.OnIdle += delegate { if (CurrentState != IconState.Error) CurrentState = IconState.Idle; @@ -68,15 +87,15 @@ namespace SparkleShare { UpdateMenuEvent (CurrentState); }; + Program.Controller.OnSyncing += delegate { CurrentState = IconState.Syncing; - // TODO up down both - if (UpdateMenuEvent != null) UpdateMenuEvent (IconState.Syncing); }; + Program.Controller.OnError += delegate { 
CurrentState = IconState.Error; diff --git a/SparkleShare/SparkleUI.cs b/SparkleShare/SparkleUI.cs index 576710bc..96ec23f2 100644 --- a/SparkleShare/SparkleUI.cs +++ b/SparkleShare/SparkleUI.cs @@ -23,11 +23,9 @@ using System.Runtime.InteropServices; using System.Text; using System.Threading; -#if __MonoCS__ using Gtk; -#else -using System.Windows.Forms; -#endif +using Mono.Unix; +using Mono.Unix.Native; using SparkleLib; namespace SparkleShare { @@ -46,24 +44,18 @@ namespace SparkleShare { // Short alias for the translations public static string _(string s) { - return Program._ (s); + return Catalog.GetString (s); } public SparkleUI () { // Initialize the application -#if __MonoCS__ Application.Init (); - GLib.ExceptionManager.UnhandledException += delegate (GLib.UnhandledExceptionArgs exArgs) { - Exception UnhandledException = (Exception)exArgs.ExceptionObject; - string ExceptionMessage = UnhandledException.Message.ToString (); - MessageDialog ExceptionDialog = new MessageDialog (null, DialogFlags.Modal, MessageType.Error, ButtonsType.Ok, - "Unhandled Exception!\n" + UnhandledException.GetType ().ToString ()); - ExceptionDialog.Title = "ERROR"; + // Use translations + Catalog.Init (Defines.GETTEXT_PACKAGE, Defines.LOCALE_DIR); -#endif StatusIcon = new SparkleStatusIcon (); Bubbles = new SparkleBubbles (); @@ -81,7 +73,6 @@ namespace SparkleShare { public void Run () { Application.Run (); - StatusIcon.Dispose (); } } } diff --git a/SparkleShare/SparkleUIHelpers.cs b/SparkleShare/SparkleUIHelpers.cs index 2ad40ff0..7bcc2eb8 100755 --- a/SparkleShare/SparkleUIHelpers.cs +++ b/SparkleShare/SparkleUIHelpers.cs @@ -42,10 +42,12 @@ namespace SparkleShare { IconTheme icon_theme = new IconTheme (); icon_theme.AppendSearchPath ( - Path.Combine (SparkleUI.AssetsPath, "icons")); + Path.Combine (SparkleUI.AssetsPath, "icons") + ); icon_theme.AppendSearchPath ( - Path.Combine (SparkleConfig.ConfigPath, "icons")); + Path.Combine (SparkleConfig.ConfigPath, "icons") + ); try { return icon_theme.LoadIcon (name, size, IconLookupFlags.GenericFallback); diff --git a/autogen.sh b/autogen.sh index 41a48155..3af42db6 100755 --- a/autogen.sh +++ b/autogen.sh @@ -73,10 +73,6 @@ else touch gnome-doc-utils.make fi -if git --help &>/dev/null; then - git submodule update --init -fi - run intltoolize --force --copy run $LIBTOOLIZE --force --copy --automake run aclocal -I build/m4/sparkleshare -I build/m4/shamrock -I build/m4/shave $ACLOCAL_FLAGS @@ -85,12 +81,6 @@ run autoconf run automake --gnu --add-missing --force --copy \ -Wno-portability -Wno-portability -if test -d $srcdir/SmartIrc4net; then - echo Running SmartIrc4net/autogen.sh ... - (cd $srcdir/SmartIrc4net; NOCONFIGURE=1 ./autogen.sh "$@") - echo Done running SmartIrc4net/autogen.sh ... -fi - if [ ! -z "$NOCONFIGURE" ]; then echo "Done. ./configure skipped." exit $? 
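The status icon controller above now computes the displayed project size itself, summing each repository's data and history sizes and passing the total through Program.Controller.FormatSize. A minimal standalone sketch of that aggregation follows; RepoInfo and SizeHelper.FormatSize are illustrative stand-ins, not the actual SparkleLib types:

    // Sketch only: mirrors the FolderSize getter in SparkleStatusIconController,
    // with a hypothetical FormatSize helper instead of Program.Controller.FormatSize.
    using System;
    using System.Collections.Generic;

    class RepoInfo {
        public double Size;        // size of the working tree, in bytes
        public double HistorySize; // size of the .git history, in bytes
    }

    static class SizeHelper {
        public static string FormatSize (double bytes)
        {
            string [] units = { "bytes", "KB", "MB", "GB", "TB" };
            int unit = 0;
            while (bytes >= 1024 && unit < units.Length - 1) {
                bytes /= 1024;
                unit++;
            }
            return string.Format ("{0:0.#} {1}", bytes, units [unit]);
        }

        public static string TotalSize (IEnumerable<RepoInfo> repos)
        {
            double size = 0;
            foreach (RepoInfo repo in repos)
                size += repo.Size + repo.HistorySize; // same sum as the FolderSize getter
            return FormatSize (size);
        }
    }

The resulting string is what the status icon appends after "Up to date" in its state text.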
diff --git a/build/build.environment.mk b/build/build.environment.mk index a76b32ff..e59b6fd1 100755 --- a/build/build.environment.mk +++ b/build/build.environment.mk @@ -15,8 +15,6 @@ LINK_GTK = $(GTKSHARP_LIBS) LINK_GNOME = $(GNOME_SHARP_LIBS) LINK_DBUS = $(NDESK_DBUS_LIBS) $(NDESK_DBUS_GLIB_LIBS) LINK_DBUS_NO_GLIB = $(NDESK_DBUS_LIBS) -LINK_SMARTIRC4NET_SYSTEM = $(SMARTIRC4NET_LIBS) -LINK_SMARTIRC4NET_LOCAL = -r:$(abs_top_builddir)/$(SMARTIRC4NET_ASSEMBLY) LINK_APP_INDICATOR = $(APP_INDICATOR_LIBS) REF_NOTIFY_SHARP = $(LINK_SYSTEM) $(LINK_DBUS) $(GTKSHARP_LIBS) $(GLIBSHARP_LIBS) diff --git a/build/build.rules.mk b/build/build.rules.mk index a84f08ad..7740d5a4 100755 --- a/build/build.rules.mk +++ b/build/build.rules.mk @@ -26,7 +26,7 @@ ASSEMBLY_FILE = $(top_builddir)/bin/$(ASSEMBLY).$(ASSEMBLY_EXTENSION) INSTALL_DIR_RESOLVED = $(firstword $(subst , $(DEFAULT_INSTALL_DIR), $(INSTALL_DIR))) if ENABLE_TESTS - LINK += " $(NUNIT_LIBS)" + LINK = " $(NUNIT_LIBS)" ENABLE_TESTS_FLAG = "-define:ENABLE_TESTS" endif diff --git a/build/m4/sparkleshare/smartirc4net.m4 b/build/m4/sparkleshare/smartirc4net.m4 deleted file mode 100755 index 9cb1099b..00000000 --- a/build/m4/sparkleshare/smartirc4net.m4 +++ /dev/null @@ -1,16 +0,0 @@ -AC_DEFUN([SPARKLESHARE_SMARTIRC4NET], -[ - if test ! -d "$srcdir/SmartIrc4net"; then - AC_MSG_ERROR([SmartIrc4net folder not found]) - fi - ac_configure_args="$ac_configure_args --disable-pkg-config --disable-pkg-lib --disable-pkg-gac" - AC_CONFIG_SUBDIRS([SmartIrc4net]) - asm="SmartIrc4net/bin/Meebey.SmartIrc4net.dll" - SMARTIRC4NET_ASSEMBLY="$asm" - SMARTIRC4NET_FILES="$asm" - [[ -r "$asm.mdb" ]] && SMARTIRC4NET_FILES="$SMARTIRC4NET_FILES $asm.mdb" - - AC_SUBST([SMARTIRC4NET_ASSEMBLY]) - AC_SUBST([SMARTIRC4NET_FILES]) -]) - diff --git a/configure.ac b/configure.ac index 2116eb11..2c97ee9f 100755 --- a/configure.ac +++ b/configure.ac @@ -1,9 +1,9 @@ dnl Process this file with autoconf to produce a configure script. m4_define([sparkleshare_version], - [0.4.0]) + [0.6.0]) m4_define([sparkleshare_asm_version], - [0.4.0]) + [0.6.0]) AC_PREREQ([2.54]) AC_INIT([SparkleShare], sparkleshare_version) @@ -87,18 +87,6 @@ dnl package checks, common for all configs SPARKLESHARE_CHECK_GTK_SHARP -SPARKLESHARE_SMARTIRC4NET -PKG_CHECK_MODULES([SMARTIRC4NET], [smartirc4net >= 0.5], - SMARTIRC4NET_ASSEMBLY="" - AC_SUBST(SMARTIRC4NET_ASSEMBLY) - , - SMARTIRC4NET_LIBS="" - AC_SUBST(SMARTIRC4NET_LIBS) - SPARKLE_SMARTIRC4NETDIR=SmartIrc4Net - AC_SUBST(SPARKLE_SMARTIRC4NETDIR) -) -AM_CONDITIONAL([HAVE_SMARTIRC4NET], test x$SPARKLE_SMARTIRC4NETDIR = x) - AC_ARG_ENABLE(gtkui, AS_HELP_STRING([--disable-gtkui], [Do not build the Gtk+ user interface]), [ enable_gtkui=no ], [ enable_gtkui=yes ]) diff --git a/data/git-hooks/post-update b/data/git-hooks/post-update index 45078f3c..c1673f66 100755 --- a/data/git-hooks/post-update +++ b/data/git-hooks/post-update @@ -1,28 +1,39 @@ #!/bin/sh -# for use with gitolite # -# http://sitaramc.github.com/gitolite/hooks.html -# copy this file to -# .gitolite/hooks/common/post-update -# run gl-setup again - -# for use with standard ssh/http(s)/git repos +# This Git hook sends update message to a notification server, so +# that manual git pushes to a repository also will be noticed by +# SparkleShare clients. +# +# For information on running your own notification service: +# https://github.com/hbons/fanout.node.js +# +# +# For use with Gitolite: +# +# Copy this file to .gitolite/hooks/common/post-update. +# Run "gl-setup" again. 
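+# (re-running "gl-setup" copies the hooks in hooks/common into every
+# hosted repository)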
+# +# +# For use with standard Git repositories: +# +# Copy this file to .git/hooks/post-update in +# the remote repository +# +# +# Make sure to "chmod -x" this hook after the file has been copied # -# simply move this file to -# .git/hooks/post-update in the remote repository -# make sure to chmod -x in all cases after the file has been copied +# Change these if you run your own service somewhere +SERVER="notifications.sparkleshare.org" +PORT="1986" -#To supress all output +# Don't edit below this line exec > /dev/null 2>&1 -# for information on running your own server -# https://github.com/hbons/fanout.node.js -SERVER="204.62.14.135" -PORT="1986" CHANNEL=$(git rev-list --reverse HEAD | head -n 1) MESSAGE=$(git rev-list HEAD | head -n 1) + DATA="announce ${CHANNEL} ${MESSAGE}" echo "${DATA}\n" | socat - TCP-CONNECT:${SERVER}:${PORT} & diff --git a/data/tutorial-slide-1-mac.png b/data/tutorial-slide-1-mac.png new file mode 100644 index 00000000..17d40d1c Binary files /dev/null and b/data/tutorial-slide-1-mac.png differ diff --git a/data/tutorial-slide-2-mac.png b/data/tutorial-slide-2-mac.png new file mode 100644 index 00000000..f2865c98 Binary files /dev/null and b/data/tutorial-slide-2-mac.png differ diff --git a/data/tutorial-slide-3-mac.png b/data/tutorial-slide-3-mac.png new file mode 100644 index 00000000..837f125d Binary files /dev/null and b/data/tutorial-slide-3-mac.png differ diff --git a/data/tutorial-slide-4.png b/data/tutorial-slide-4.png index fe95f230..ae642773 100755 Binary files a/data/tutorial-slide-4.png and b/data/tutorial-slide-4.png differ diff --git a/lib/TODO b/lib/TODO deleted file mode 100755 index de67e20b..00000000 --- a/lib/TODO +++ /dev/null @@ -1 +0,0 @@ -Move external dependancies in here (only SmartIrc4net at the moment) diff --git a/po/POTFILES.in b/po/POTFILES.in index fbf8c10f..474763a1 100755 --- a/po/POTFILES.in +++ b/po/POTFILES.in @@ -16,6 +16,7 @@ SparkleShare/SparkleAbout.cs SparkleShare/SparkleController.cs SparkleShare/SparkleControllerBase.cs SparkleShare/SparkleEventLog.cs +SparkleShare/SparkleOptions.cs SparkleShare/SparkleSetup.cs SparkleShare/SparkleSetupWindow.cs SparkleShare/SparkleSpinner.cs diff --git a/po/de.po b/po/de.po index 9ce2741f..b60fdfda 100755 --- a/po/de.po +++ b/po/de.po @@ -18,7 +18,7 @@ msgstr "" "Project-Id-Version: SparkleShare\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2011-11-12 18:42+0100\n" -"PO-Revision-Date: 2011-11-14 07:55+0000\n" +"PO-Revision-Date: 2011-11-16 09:20+0000\n" "Last-Translator: iwsnipy \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -36,11 +36,11 @@ msgstr "Bitbucket" #: ../data/plugins/bitbucket.xml.in.h:3 msgid "Free code hosting for Git and Mercurial" -msgstr "" +msgstr "Freies Code-Hosting für Git und Mercurial" #: ../data/plugins/github.xml.in.h:2 msgid "Free public Git repositories with collaborator management" -msgstr "" +msgstr "Freie öffentliche Git Repositories mit Mitarbeiterverwaltung" #: ../data/plugins/github.xml.in.h:3 msgid "Github" @@ -56,7 +56,7 @@ msgstr "Gitorious" #: ../data/plugins/gitorious.xml.in.h:3 msgid "Open source infrastructure for hosting open source projects" -msgstr "" +msgstr "Quelloffene Infrastruktur zum hosten von quelloffenen Projekten" #: ../data/plugins/gnome.xml.in.h:1 msgid "/project" @@ -205,11 +205,11 @@ msgstr "Suche Aktualisierungen..." 
#: ../SparkleShare/SparkleControllerBase.cs:494 msgid "dddd, MMMM d, yyyy" -msgstr "" +msgstr "dd mm, yyyy" #: ../SparkleShare/SparkleControllerBase.cs:500 msgid "dddd, MMMM d" -msgstr "" +msgstr "dd mm" #: ../SparkleShare/SparkleControllerBase.cs:708 #, csharp-format @@ -235,12 +235,12 @@ msgstr "Gelöscht '{0}'" #, csharp-format msgid "and {0} more" msgid_plural "and {0} more" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "und {0} mehr" +msgstr[1] "und {0} mehr" #: ../SparkleShare/SparkleControllerBase.cs:736 msgid "did something magical" -msgstr "" +msgstr "hat etwas magisches" #: ../SparkleShare/SparkleEventLog.cs:58 msgid "Recent Events" @@ -273,7 +273,7 @@ msgstr "Weiter" #: ../SparkleShare/SparkleSetup.cs:129 msgid "Where's your project hosted?" -msgstr "" +msgstr "Wo ist dein Projekt gehostet?" #: ../SparkleShare/SparkleSetup.cs:275 msgid "Address" @@ -399,12 +399,14 @@ msgid "" "…or select ‘Add Hosted Project…’ from the status icon menu to add one" " by hand." msgstr "" +"... oder wähle 'Füge gehostetes Projekt hinzu...' im Statusmenü um " +"einen via Hand hinzu zu fügen." #. Opens the wizard to add a new remote folder #: ../SparkleShare/SparkleSetup.cs:551 #: ../SparkleShare/SparkleStatusIcon.cs:238 msgid "Add Hosted Project…" -msgstr "" +msgstr "Füge ein gehostetes Projekt hinzu..." #: ../SparkleShare/SparkleSetupWindow.cs:44 msgid "SparkleShare Setup" diff --git a/po/pt_BR.po b/po/pt_BR.po index c461ed7b..031f0cf0 100755 --- a/po/pt_BR.po +++ b/po/pt_BR.po @@ -3,14 +3,15 @@ # Translators: # DigitalDead , 2011. # , 2011. +# Nícolas Lazarte , 2011. # Paulo Paulo , 2011. msgid "" msgstr "" "Project-Id-Version: SparkleShare\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2011-10-30 15:50+0100\n" -"PO-Revision-Date: 2011-11-08 12:53+0000\n" -"Last-Translator: eduardosilva \n" +"POT-Creation-Date: 2011-12-04 12:45+0100\n" +"PO-Revision-Date: 2011-12-04 11:46+0000\n" +"Last-Translator: deejay1 \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" @@ -23,15 +24,15 @@ msgstr "/usuário/projeto" #: ../data/plugins/bitbucket.xml.in.h:2 msgid "Bitbucket" -msgstr "" +msgstr "Bitbucket" #: ../data/plugins/bitbucket.xml.in.h:3 msgid "Free code hosting for Git and Mercurial" -msgstr "" +msgstr "Hospedagem gratuita de código para Git e Mercurial" #: ../data/plugins/github.xml.in.h:2 msgid "Free public Git repositories with collaborator management" -msgstr "" +msgstr "Repositórios públicos Git, gratuitos com gestão de colaboradores" #: ../data/plugins/github.xml.in.h:3 msgid "Github" @@ -48,6 +49,8 @@ msgstr "Gitorious" #: ../data/plugins/gitorious.xml.in.h:3 msgid "Open source infrastructure for hosting open source projects" msgstr "" +"Infra-estrutura de código aberto para hospedagem de projetos com código " +"aberto " #: ../data/plugins/gnome.xml.in.h:1 msgid "/project" @@ -55,7 +58,7 @@ msgstr "/projeto" #: ../data/plugins/gnome.xml.in.h:2 msgid "A free and easy interface for your computer" -msgstr "" +msgstr "Uma interface fácil e gratuita para o seu computador" #: ../data/plugins/gnome.xml.in.h:3 msgid "The GNOME Project" @@ -73,105 +76,110 @@ msgstr "Tudo sob meu controle" msgid "On my own server" msgstr "Em meu próprio servidor" -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:70 -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:88 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:84 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:102 #: ../SparkleShare/SparkleSetup.cs:75 ../SparkleShare/SparkleStatusIcon.cs:75 #: ../SparkleShare/SparkleStatusIcon.cs:89 msgid 
"Welcome to SparkleShare!" msgstr "Bem-vindo ao SparkleShare!" -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:72 -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:90 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:86 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:104 #: ../SparkleShare/SparkleStatusIcon.cs:77 #: ../SparkleShare/SparkleStatusIcon.cs:91 msgid "Up to date" msgstr "Atualizado" -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:99 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:113 #: ../SparkleShare/SparkleStatusIcon.cs:106 msgid "Syncing…" msgstr "Sincronizando…" -#: ../SparkleShare/Mac/SparkleStatusIcon.cs:109 -#: ../SparkleShare/SparkleStatusIcon.cs:116 +#: ../SparkleShare/Mac/SparkleStatusIcon.cs:125 +#: ../SparkleShare/SparkleStatusIcon.cs:118 msgid "Not everything is synced" msgstr "Nem tudo foi sincronizado" -#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:113 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:114 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus3-extension.py.in:141 msgid "Copy Web Link" msgstr "Copiar Endereço Web" -#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:114 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:115 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus3-extension.py.in:142 msgid "Copy the web address of this file to the clipboard" msgstr "Copiar o endereço web deste arquivo" -#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:147 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:148 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus3-extension.py.in:174 msgid "Get Earlier Version" msgstr "Obter Versão Anterior" -#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:148 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:149 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus3-extension.py.in:175 msgid "Make a copy of an earlier version in this folder" msgstr "Fazer uma cópia de uma anterior nesta pasta" -#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:161 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus-extension.py.in:162 +#: ../SparkleShare/Nautilus/sparkleshare-nautilus3-extension.py.in:188 msgid "Select to get a copy of this version" msgstr "Selecione para obter uma cópia dessa versão" -#: ../SparkleShare/Program.cs:51 +#: ../SparkleShare/Program.cs:50 msgid "Print version information" msgstr "Imprimir informações da versão" -#: ../SparkleShare/Program.cs:52 +#: ../SparkleShare/Program.cs:51 msgid "Show this help text" msgstr "Exibir esse texto de ajuda" -#: ../SparkleShare/Program.cs:84 +#: ../SparkleShare/Program.cs:83 msgid "SparkleShare, a collaboration and sharing tool." msgstr "SparkleShare, uma ferramenta de colaboração e compartilhamento" -#: ../SparkleShare/Program.cs:85 +#: ../SparkleShare/Program.cs:84 msgid "Copyright (C) 2010 Hylke Bons" msgstr "Copyright (C) 2010 Hylke Bons - Todos os direitos reservados" -#: ../SparkleShare/Program.cs:87 +#: ../SparkleShare/Program.cs:86 msgid "This program comes with ABSOLUTELY NO WARRANTY." msgstr "Este programa vem com ABSOLUTAMENTE NENHUMA GARANTIA." -#: ../SparkleShare/Program.cs:89 +#: ../SparkleShare/Program.cs:88 msgid "This is free software, and you are welcome to redistribute it " msgstr "Este é um software livre, e você está convidado a distribuí-lo" -#: ../SparkleShare/Program.cs:90 +#: ../SparkleShare/Program.cs:89 msgid "under certain conditions. Please read the GNU GPLv3 for details." msgstr "" "sob certas condições. 
Por favor leia a licença GNU GPLv3 para mais detalhes." -#: ../SparkleShare/Program.cs:92 +#: ../SparkleShare/Program.cs:91 msgid "SparkleShare automatically syncs Git repositories in " msgstr "O SparkleShare sincroniza os repositórios do Git automaticamente" -#: ../SparkleShare/Program.cs:93 +#: ../SparkleShare/Program.cs:92 msgid "the ~/SparkleShare folder with their remote origins." msgstr "a pasta ~/SparkleShare com suas origens remotas" -#: ../SparkleShare/Program.cs:95 +#: ../SparkleShare/Program.cs:94 msgid "Usage: sparkleshare [start|stop|restart] [OPTION]..." msgstr "Utilização: sparkleshare [start|stop|restart] [OPÇÕES]..." -#: ../SparkleShare/Program.cs:96 +#: ../SparkleShare/Program.cs:95 msgid "Sync SparkleShare folder with remote repositories." msgstr "Sincroniza a pasta SparkleShare com repositórios remotos." -#: ../SparkleShare/Program.cs:98 +#: ../SparkleShare/Program.cs:97 msgid "Arguments:" msgstr "Argumentos:" -#: ../SparkleShare/Program.cs:108 +#: ../SparkleShare/Program.cs:107 msgid "SparkleShare " msgstr "SparkleShare" #. A menu item that takes the user to http://www.sparkleshare.org/ -#: ../SparkleShare/SparkleAbout.cs:53 ../SparkleShare/SparkleStatusIcon.cs:295 +#: ../SparkleShare/SparkleAbout.cs:53 ../SparkleShare/SparkleStatusIcon.cs:297 msgid "About SparkleShare" msgstr "Sobre o SparkleShare" @@ -184,65 +192,30 @@ msgstr "Uma nova versão ({0}) está disponível!" msgid "You are running the latest version." msgstr "Você está executando a última versão" -#: ../SparkleShare/SparkleAbout.cs:88 ../SparkleShare/SparkleAbout.cs:113 +#: ../SparkleShare/SparkleAbout.cs:88 ../SparkleShare/SparkleAbout.cs:110 msgid "Checking for updates..." msgstr "Verificando atualizações" -#: ../SparkleShare/SparkleControllerBase.cs:493 +#: ../SparkleShare/SparkleControllerBase.cs:489 msgid "dddd, MMMM d, yyyy" -msgstr "" +msgstr "dddd, MMMM d, yyyy" -#: ../SparkleShare/SparkleControllerBase.cs:499 +#: ../SparkleShare/SparkleControllerBase.cs:495 msgid "dddd, MMMM d" -msgstr "" +msgstr "dddd, MMMM d" -#: ../SparkleShare/SparkleControllerBase.cs:707 -#, csharp-format -msgid "added ‘{0}’" -msgstr "adicionado \"{0} '" - -#: ../SparkleShare/SparkleControllerBase.cs:712 -#, csharp-format -msgid "moved ‘{0}’" -msgstr "movido '{0}'" - -#: ../SparkleShare/SparkleControllerBase.cs:717 -#, csharp-format -msgid "edited ‘{0}’" -msgstr "editado '{0}'" - -#: ../SparkleShare/SparkleControllerBase.cs:722 -#, csharp-format -msgid "deleted ‘{0}’" -msgstr "removido '{0}'" - -#: ../SparkleShare/SparkleControllerBase.cs:731 -#, csharp-format -msgid "and {0} more" -msgid_plural "and {0} more" -msgstr[0] "" -msgstr[1] "" - -#: ../SparkleShare/SparkleControllerBase.cs:735 -msgid "did something magical" -msgstr "fez algo mágico" - -#: ../SparkleShare/SparkleEventLog.cs:58 +#: ../SparkleShare/SparkleEventLog.cs:56 msgid "Recent Events" msgstr "Eventos Recentes" -#: ../SparkleShare/SparkleEventLog.cs:169 -#: ../SparkleShare/SparkleEventLog.cs:188 +#: ../SparkleShare/SparkleEventLog.cs:145 +#: ../SparkleShare/SparkleEventLog.cs:164 msgid "All Folders" msgstr "Todas as pastas" -#: ../SparkleShare/SparkleSetup.cs:76 -msgid "" -"Before we can create a SparkleShare folder on this computer, we need a few " -"bits of information from you." +#: ../SparkleShare/SparkleOptions.cs:486 +msgid "OptionName" msgstr "" -"Antes de criarmos uma pasta SparkleShare neste computador, precisamos de " -"algumas informações sobre você." 
#: ../SparkleShare/SparkleSetup.cs:83 msgid "Full Name:" @@ -260,73 +233,73 @@ msgstr "Próximo" msgid "Where's your project hosted?" msgstr "Onde está o seu projeto está hospedado?" -#: ../SparkleShare/SparkleSetup.cs:275 +#: ../SparkleShare/SparkleSetup.cs:266 msgid "Address" msgstr "Endereço" -#: ../SparkleShare/SparkleSetup.cs:295 +#: ../SparkleShare/SparkleSetup.cs:286 msgid "Remote Path" msgstr "Caminho remoto" #. Cancel button -#: ../SparkleShare/SparkleSetup.cs:309 ../SparkleShare/SparkleSetup.cs:345 +#: ../SparkleShare/SparkleSetup.cs:300 ../SparkleShare/SparkleSetup.cs:336 msgid "Cancel" msgstr "Cancelar" #. Sync button -#: ../SparkleShare/SparkleSetup.cs:316 +#: ../SparkleShare/SparkleSetup.cs:307 msgid "Add" msgstr "Adicionar" -#: ../SparkleShare/SparkleSetup.cs:335 +#: ../SparkleShare/SparkleSetup.cs:326 #, csharp-format msgid "Adding project ‘{0}’…" msgstr "Adicionando projeto '{0}' ..." -#: ../SparkleShare/SparkleSetup.cs:336 +#: ../SparkleShare/SparkleSetup.cs:327 msgid "This may take a while." msgstr "Isto pode demorar um pouco." -#: ../SparkleShare/SparkleSetup.cs:337 +#: ../SparkleShare/SparkleSetup.cs:328 msgid "Are you sure it’s not coffee o'clock?" msgstr "Você tem certeza de que não é hora do café?" -#: ../SparkleShare/SparkleSetup.cs:341 ../SparkleShare/SparkleSetup.cs:457 -#: ../SparkleShare/SparkleSetup.cs:556 +#: ../SparkleShare/SparkleSetup.cs:332 ../SparkleShare/SparkleSetup.cs:448 +#: ../SparkleShare/SparkleSetup.cs:558 msgid "Finish" msgstr "Finalizar" -#: ../SparkleShare/SparkleSetup.cs:374 +#: ../SparkleShare/SparkleSetup.cs:365 msgid "Something went wrong" msgstr "Algum problema ocorreu" -#: ../SparkleShare/SparkleSetup.cs:422 +#: ../SparkleShare/SparkleSetup.cs:413 msgid "Try Again…" msgstr "Tente novamente..." -#: ../SparkleShare/SparkleSetup.cs:441 +#: ../SparkleShare/SparkleSetup.cs:432 #, csharp-format msgid "‘{0}’ has been successfully added" msgstr "'{0}' foi incluída com sucesso" -#: ../SparkleShare/SparkleSetup.cs:447 +#: ../SparkleShare/SparkleSetup.cs:438 msgid "Project successfully added!" msgstr "Projeto adicionado com sucesso!" -#: ../SparkleShare/SparkleSetup.cs:448 +#: ../SparkleShare/SparkleSetup.cs:439 msgid "Access the files from your SparkleShare folder." msgstr "Acessar os arquivos da pasta SparkleShare." #. A button that opens the synced folder -#: ../SparkleShare/SparkleSetup.cs:451 +#: ../SparkleShare/SparkleSetup.cs:442 msgid "Open Folder" msgstr "Abrir Pasta" -#: ../SparkleShare/SparkleSetup.cs:477 +#: ../SparkleShare/SparkleSetup.cs:484 msgid "What's happening next?" msgstr "O que está acontecendo em seguida?" -#: ../SparkleShare/SparkleSetup.cs:478 +#: ../SparkleShare/SparkleSetup.cs:485 msgid "" "SparkleShare creates a special folder in your personal folder that will keep" " track of your projects." @@ -334,20 +307,20 @@ msgstr "" "SparkleShare cria uma pasta especial em sua pasta pessoal que vai manter o " "controle de seus projetos." 
-#: ../SparkleShare/SparkleSetup.cs:481 +#: ../SparkleShare/SparkleSetup.cs:488 msgid "Skip Tutorial" msgstr "Pular Tutorial" -#: ../SparkleShare/SparkleSetup.cs:486 ../SparkleShare/SparkleSetup.cs:506 -#: ../SparkleShare/SparkleSetup.cs:524 +#: ../SparkleShare/SparkleSetup.cs:493 ../SparkleShare/SparkleSetup.cs:513 +#: ../SparkleShare/SparkleSetup.cs:531 msgid "Continue" msgstr "Continuar" -#: ../SparkleShare/SparkleSetup.cs:502 +#: ../SparkleShare/SparkleSetup.cs:509 msgid "Sharing files with others" msgstr "Compartilhar arquivos com outras pessoas" -#: ../SparkleShare/SparkleSetup.cs:503 +#: ../SparkleShare/SparkleSetup.cs:510 msgid "" "All files added to your project folders are synced with the host " "automatically, as well as with your collaborators." @@ -355,11 +328,11 @@ msgstr "" "Todos os arquivos adicionados a pasta do seu projeto são sincronizados " "automaticamente com o servidor, bem como com seus colaboradores." -#: ../SparkleShare/SparkleSetup.cs:520 +#: ../SparkleShare/SparkleSetup.cs:527 msgid "The status icon is here to help" msgstr "O ícone de status está aqui para ajudar" -#: ../SparkleShare/SparkleSetup.cs:521 +#: ../SparkleShare/SparkleSetup.cs:528 msgid "" "It shows the syncing process status, and contains links to your projects and" " the event log." @@ -367,11 +340,11 @@ msgstr "" "Mostra o status do processo de sincronização, e contém links para seus " "projetos e log de eventos." -#: ../SparkleShare/SparkleSetup.cs:538 +#: ../SparkleShare/SparkleSetup.cs:545 msgid "Adding projects to SparkleShare" msgstr "Adicionando projetos ao SparkleShare" -#: ../SparkleShare/SparkleSetup.cs:539 +#: ../SparkleShare/SparkleSetup.cs:546 msgid "" "Just click this button when you see it on the web, and the project will be " "automatically added:" @@ -379,7 +352,7 @@ msgstr "" "Basta clicar neste botão quando você vê-la na web, e o projeto será " "automaticamente adicionado automaticamente:" -#: ../SparkleShare/SparkleSetup.cs:542 +#: ../SparkleShare/SparkleSetup.cs:549 msgid "" "…or select ‘Add Hosted Project…’ from the status icon menu to add one" " by hand." @@ -387,34 +360,33 @@ msgstr "" "... Ou selecione 'Adicionar Projeto Hosted ...' no menu do ícone de " "status para adicionar manualmente." -#. Opens the wizard to add a new remote folder -#: ../SparkleShare/SparkleSetup.cs:551 -#: ../SparkleShare/SparkleStatusIcon.cs:238 -msgid "Add Hosted Project…" -msgstr "Adicionar Projeto Hospedado..." - #: ../SparkleShare/SparkleSetupWindow.cs:44 msgid "SparkleShare Setup" msgstr "Configurações do SparkleShare" -#: ../SparkleShare/SparkleStatusIcon.cs:228 +#: ../SparkleShare/SparkleStatusIcon.cs:230 msgid "No projects yet" msgstr "Nenhum projeto ainda" -#: ../SparkleShare/SparkleStatusIcon.cs:262 +#. Opens the wizard to add a new remote folder +#: ../SparkleShare/SparkleStatusIcon.cs:240 +msgid "Add Hosted Project…" +msgstr "Adicionar Projeto Hospedado..." + +#: ../SparkleShare/SparkleStatusIcon.cs:264 msgid "Open Recent Events" msgstr "Abrir Eventos Recentes" -#: ../SparkleShare/SparkleStatusIcon.cs:282 +#: ../SparkleShare/SparkleStatusIcon.cs:284 msgid "Turn Notifications Off" msgstr "Desligar as notificações" -#: ../SparkleShare/SparkleStatusIcon.cs:284 +#: ../SparkleShare/SparkleStatusIcon.cs:286 msgid "Turn Notifications On" msgstr "Ligar as notificações" #. A menu item that quits the application -#: ../SparkleShare/SparkleStatusIcon.cs:311 +#: ../SparkleShare/SparkleStatusIcon.cs:313 msgid "Quit" msgstr "Sair"