diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index a180d0516..c17e1dc1c 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -10,6 +10,7 @@ Have you read GitHub for Unity's Code of Conduct? By filing an Issue, you are ex - Include the log file in the PR. - On Windows, the extension log file is at `%LOCALAPPDATA%\GitHubUnity\github-unity.log` - On macOS, the extension log file is at `~/Library/Logs/GitHubUnity/github-unity.log` + - On linux, the extension log file is at `~/.local/share/GitHubUnity/github-unity.log` ### Description diff --git a/.gitignore b/.gitignore index 67d4e53bc..7fc38bf13 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,5 @@ _NCrunch_GitHub.Unity .DS_Store build/ TestResult.xml -submodules/ *.stackdump *.lastcodeanalysissucceeded \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index d9e14a3e9..8ecda521f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ [submodule "script"] path = script url = git@github.com:github-for-unity/UnityBuildScripts +[submodule "submodules/packaging"] + path = submodules/packaging + url = https://github.com/github-for-unity/packaging diff --git a/GitHub.Unity.sln b/GitHub.Unity.sln index 0197b47fc..0707a70fa 100644 --- a/GitHub.Unity.sln +++ b/GitHub.Unity.sln @@ -5,8 +5,12 @@ VisualStudioVersion = 14.0.25420.1 MinimumVisualStudioVersion = 10.0.40219.1 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GitHub.Unity", "src\UnityExtension\Assets\Editor\GitHub.Unity\GitHub.Unity.csproj", "{ADD7A18B-DD2A-4C22-A2C1-488964EFF30A}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GitHub.Unity.45", "src\UnityExtension\Assets\Editor\GitHub.Unity\GitHub.Unity.45.csproj", "{ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GitHub.Api", "src\GitHub.Api\GitHub.Api.csproj", "{B389ADAF-62CC-486E-85B4-2D8B078DF763}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"GitHub.Api.45", "src\GitHub.Api\GitHub.Api.45.csproj", "{B389ADAF-62CC-486E-85B4-2D8B078DF76B}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GitHub.Logging", "src\GitHub.Logging\GitHub.Logging.csproj", "{BB6A8EDA-15D8-471B-A6ED-EE551E0B3BA0}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CopyLibrariesToDevelopmentFolder", "src\packaging\CopyLibrariesToDevelopmentFolder\CopyLibrariesToDevelopmentFolder.csproj", "{44257C81-EE4A-4817-9AF4-A26C02AA6DD4}" @@ -31,6 +35,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TestWebServer", "src\tests\ EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "UnityTests", "src\UnityExtension\Assets\Editor\UnityTests\UnityTests.csproj", "{462CDBD4-0DDA-4854-1B13-CFDACBFB66F5}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ExtensionLoader", "src\UnityExtension\Assets\Editor\GitHub.Unity\ExtensionLoader\ExtensionLoader.csproj", "{6B0EAB30-511A-44C1-87FE-D9AB7E34D115}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "UnityShim", "src\UnityShim\UnityShim.csproj", "{F94F8AE1-C171-4A83-89E8-6557CA91A188}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -46,6 +54,13 @@ Global {ADD7A18B-DD2A-4C22-A2C1-488964EFF30A}.dev|Any CPU.Build.0 = dev|Any CPU {ADD7A18B-DD2A-4C22-A2C1-488964EFF30A}.Release|Any CPU.ActiveCfg = Release|Any CPU {ADD7A18B-DD2A-4C22-A2C1-488964EFF30A}.Release|Any CPU.Build.0 = Release|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.dev|Any CPU.ActiveCfg = dev|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.dev|Any CPU.Build.0 = dev|Any CPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.Release|Any CPU.ActiveCfg = Release|Any CPU 
+ {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B}.Release|Any CPU.Build.0 = Release|Any CPU {B389ADAF-62CC-486E-85B4-2D8B078DF763}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {B389ADAF-62CC-486E-85B4-2D8B078DF763}.Debug|Any CPU.Build.0 = Debug|Any CPU {B389ADAF-62CC-486E-85B4-2D8B078DF763}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU @@ -54,6 +69,14 @@ Global {B389ADAF-62CC-486E-85B4-2D8B078DF763}.dev|Any CPU.Build.0 = dev|Any CPU {B389ADAF-62CC-486E-85B4-2D8B078DF763}.Release|Any CPU.ActiveCfg = Release|Any CPU {B389ADAF-62CC-486E-85B4-2D8B078DF763}.Release|Any CPU.Build.0 = Release|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.DebugNoUnity|Any CPU.Build.0 = Debug|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.dev|Any CPU.ActiveCfg = dev|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.dev|Any CPU.Build.0 = dev|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B}.Release|Any CPU.Build.0 = Release|Any CPU {BB6A8EDA-15D8-471B-A6ED-EE551E0B3BA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {BB6A8EDA-15D8-471B-A6ED-EE551E0B3BA0}.Debug|Any CPU.Build.0 = Debug|Any CPU {BB6A8EDA-15D8-471B-A6ED-EE551E0B3BA0}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU @@ -132,6 +155,22 @@ Global {462CDBD4-0DDA-4854-1B13-CFDACBFB66F5}.dev|Any CPU.Build.0 = Debug|Any CPU {462CDBD4-0DDA-4854-1B13-CFDACBFB66F5}.Release|Any CPU.ActiveCfg = Release|Any CPU {462CDBD4-0DDA-4854-1B13-CFDACBFB66F5}.Release|Any CPU.Build.0 = Release|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU 
+ {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.DebugNoUnity|Any CPU.Build.0 = Debug|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.dev|Any CPU.ActiveCfg = dev|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.dev|Any CPU.Build.0 = dev|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115}.Release|Any CPU.Build.0 = Release|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.DebugNoUnity|Any CPU.ActiveCfg = Debug|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.DebugNoUnity|Any CPU.Build.0 = Debug|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.dev|Any CPU.ActiveCfg = dev|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.dev|Any CPU.Build.0 = dev|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -147,4 +186,7 @@ Global {3DD3451C-30FA-4294-A3A9-1E080342F867} = {D17F1B4C-42DC-4E78-BCEF-9F239A084C4D} {462CDBD4-0DDA-4854-1B13-CFDACBFB66F5} = {D17F1B4C-42DC-4E78-BCEF-9F239A084C4D} EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {66BD4D50-3779-4912-9596-2C838BF24911} + EndGlobalSection EndGlobal diff --git a/GitHub.Unity.sln.DotSettings b/GitHub.Unity.sln.DotSettings index 31c5e56f1..2166c735a 100644 --- a/GitHub.Unity.sln.DotSettings +++ b/GitHub.Unity.sln.DotSettings @@ -22,8 +22,11 @@ END_OF_LINE 1 1 + False + False False True + NEVER False True False @@ -339,8 +342,13 @@ SSH <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /> <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /> + True + True + True + True True True + True True True True diff --git a/LICENSE b/LICENSE index 
9f06ebe84..a306528ee 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2016-2018 GitHub +Copyright (c) 2016-2019 GitHub Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 236bdd910..1cd67b951 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,16 @@ # [GitHub for Unity](https://unity.github.com) -The GitHub for Unity extension brings [Git](https://git-scm.com/) and GitHub into [Unity](https://unity3d.com/), integrating source control into your work with friendly and accessible tools and workflows. +## NOTICE OF DEPRECATION -You can reach the team right here by opening a [new issue](https://github.com/github-for-unity/Unity/issues/new), or by joining one of the chats below. You can also email us at unity@github.com, or tweet at [@GitHubUnity](https://twitter.com/GitHubUnity) +This project is dead y'all! Remove GitHub for Unity from your project, then go to https://github.com/spoiledcat/git-for-unity and install Git for Unity from the instructions there. -[![Build Status](https://ci.appveyor.com/api/projects/status/github/github-for-unity/Unity?branch=master&svg=true)](https://ci.appveyor.com/project/github-windows/unity) +# What is it + +The GitHub for Unity extension brings [Git](https://git-scm.com/) and GitHub into [Unity](https://unity3d.com/), integrating source control into your work with friendly and accessible tools and workflows. -[![Join the chat at https://discord.gg/5zH8hVx](https://img.shields.io/badge/discord-join%20chat-7289DA.svg)](https://discord.gg/5zH8hVx) -[![GitHub for Unity live coding on Twitch](https://img.shields.io/badge/twitch-live%20coding-6441A4.svg)](https://www.twitch.tv/sh4na) +You can reach the team right here by opening a [new issue](https://github.com/github-for-unity/Unity/issues/new). 
You can also tweet at [@GitHubUnity](https://twitter.com/GitHubUnity) +[![Build Status](https://ci.appveyor.com/api/projects/status/github/github-for-unity/Unity?branch=master&svg=true)](https://ci.appveyor.com/project/github-windows/unity) ## Notices @@ -16,169 +18,19 @@ Please refer to the [list of known issues](https://github.com/github-for-unity/U From version 0.19 onwards, the location of the plugin has moved to `Assets/Plugins/GitHub`. If you have version 0.18 or lower, you need to delete the `Assets/Editor/GitHub` folder before you install newer versions. You should exit Unity and delete the folder from Explorer/Finder, as Unity will not unload native libraries while it's running. Also, remember to update your `.gitignore` file. -#### Table Of Contents - -[Installing GitHub for Unity](#installing-github-for-unity) - * [Requirements](#requirements) - * [Git on macOS](#git-on-macos) - * [Git on Windows](#git-on-windows) - * [Installation](#installation) - * [Log files](#log-files) - * [Windows](#windows) - * [macOS](#macos) - -[Building and Contributing](#building-and-contributing) - -[Quick Guide to GitHub for Unity](#quick-guide-to-github-for-unity) - * [Opening the GitHub window](#opening-the-github-window) - * [Initialize Repository](#initialize-repository) - * [Authentication](#authentication) - * [Publish a new repository](#publish-a-new-repository) - * [Commiting your work - Changes tab](#commiting-your-work---changes-tab) - * [Pushing/pulling your work - History tab](#pushingpulling-your-work---history-tab) - * [Branches tab](#branches-tab) - * [Settings tab](#settings-tab) - -[More Resources](#more-resources) - -[License](#license) - -## Installing GitHub for Unity - -### Requirements - -- Unity 5.4 or higher - - There's currently an blocker issue opened for 5.3 support, so we know it doesn't run there. Personal edition is fine. -- Git and Git LFS 2.x - -#### Git on macOS - -The current release has limited macOS support. 
macOS users will need to install the latest [Git](https://git-scm.com/downloads) and [Git LFS](https://git-lfs.github.com/) manually, and make sure these are on the path. You can configure the Git location in the Settings tab on the GitHub window. - -The easiest way of installing git and git lfs is to install [Homebrew](https://brew.sh/) and then do `brew install git git-lfs`. - -Make sure a Git user and email address are set in the `~/.gitconfig` file before you initialize a repository for the first time. You can set these values by opening your `~/.gitconfig` file and adding the following section, if it doesn't exist yet: - -``` -[user] - name = Your Name - email = Your Email -``` - -#### Git on Windows - -The GitHub for Unity extension ships with a bundle of Git and Git LFS, to ensure that you have the correct version. These will be installed into `%LOCALAPPDATA%\GitHubUnity` when the extension runs for the first time. - -Make sure a Git user and email address are set in the `%HOME%\.gitconfig` file before you initialize a repository for the first time. You can set these values by opening your `%HOME%\.gitconfig` file and adding the following section, if it doesn't exist yet: - -``` -[user] - name = Your Name - email = Your Email -``` - -Once the extension is installed, you can open a command line with the same Git and Git LFS version that the extension uses by going to `Window` -> `GitHub Command Line` in Unity. - -### Installation - -This extensions needs to be installed (and updated) for each Unity project that you want to version control. -First step is to download the latest package from [the releases page](https://github.com/github-for-unity/Unity/releases); -it will be saved as a file with the extension `.unitypackage`. -To install it, open Unity, then open the project you want to version control, and then double click on the downloaded package. 
-Alternatively, import the package by clicking Assets, Import Package, Custom Package, then select the downloaded package. - -#### Log files - -##### macOS - -The extension log file can be found at `~/Library/Logs/GitHubUnity/github-unity.log` - -##### Windows - -The extension log file can be found at `%LOCALAPPDATA%\GitHubUnity\github-unity.log` - ## Building and Contributing -The [CONTRIBUTING.md](CONTRIBUTING.md) document will help you get setup and familiar with the source. The [documentation](docs/) folder also contains more resources relevant to the project. - Please read the [How to Build](docs/contributing/how-to-build.md) document for information on how to build GitHub for Unity. -If you're looking for something to work on, check out the [up-for-grabs](https://github.com/github-for-unity/Unity/issues?q=is%3Aopen+is%3Aissue+label%3Aup-for-grabs) label. - - -## I have a problem with GitHub for Unity - -First, please search the [open issues](https://github.com/github-for-unity/Unity/issues?q=is%3Aopen) -and [closed issues](https://github.com/github-for-unity/Unity/issues?q=is%3Aclosed) -to see if your issue hasn't already been reported (it may also be fixed). - -If you can't find an issue that matches what you're seeing, open a [new issue](https://github.com/github-for-unity/Unity/issues/new) -and fill out the template to provide us with enough information to investigate -further. - -## Quick Guide to GitHub for Unity - -### Opening the GitHub window - -You can access the GitHub window by going to Windows -> GitHub. The window opens by default next to the Inspector window. - -### Initialize Repository - -![Initialize repository screenshot](https://user-images.githubusercontent.com/10103121/37807041-bb4446a6-2e19-11e8-9fff-a431309b8515.png) - -If the current Unity project is not in a Git repository, the GitHub for Unity extension will offer to initialize the repository for you. 
This will: - -- Initialize a git repository at the Unity project root via `git init` -- Initialize git-lfs via `git lfs install` -- Set up a `.gitignore` file at the Unity project root. -- Set up a `.gitattributes` file at the Unity project root with a large list of known binary filetypes (images, audio, etc) that should be tracked by LFS -- Configure the project to serialize meta files as text -- Create an initial commit with the `.gitignore` and `.gitattributes` file. - -### Authentication - -To set up credentials in Git so you can push and pull, you can sign in to GitHub by going to `Window` -> `GitHub` -> `Account` -> `Sign in`. You only have to sign in successfully once, your credentials will remain on the system for all Git operations in Unity and outside of it. If you've already signed in once but the Account dropdown still says `Sign in`, ignore it, it's a bug. - -![Authentication screenshot](https://user-images.githubusercontent.com/121322/27644895-8f22f904-5bd9-11e7-8a93-e6bfe0c24a74.png) - -### Publish a new repository - -1. Go to [github.com](https://github.com) and create a new empty repository - do not add a license, readme or other files during the creation process. -2. Copy the **https** URL shown in the creation page -3. In Unity, go to `Windows` -> `GitHub` -> `Settings` and paste the url into the `Remote` textbox. -3. Click `Save repository`. -4. Go to the `History` tab and click `Push`. - -### Commiting your work - Changes tab - -You can see which files have been changed and commit them through the Changes tab. `.meta` files will show up in relation to their files on the tree, so you can select a file for comitting and automatically have their `.meta` - -![Changes tab screenshot](https://user-images.githubusercontent.com/121322/27644933-ab00af72-5bd9-11e7-84c3-edec495f87f5.png) - -### Pushing/pulling your work - History tab - -The history tab includes a `Push` button to push your work to the server. 
Make sure you have a remote url configured in the `Settings` tab so that you can push and pull your work. - -To receive updates from the server by clicking on the `Pull` button. You cannot pull if you have local changes, so commit your changes before pulling. - -![History tab screenshot](https://user-images.githubusercontent.com/121322/27644965-c1109bba-5bd9-11e7-9257-4fa38f5c67d1.png) - -### Branches tab - -![Branches tab screenshot](https://user-images.githubusercontent.com/121322/27644978-cd3c5622-5bd9-11e7-9dcb-6ae5d5c7dc8a.png) - -### Settings tab - -You can configure your user data in the Settings tab, along with the path to the Git installation. +The [CONTRIBUTING.md](CONTRIBUTING.md) document will help you get setup and familiar with the source. The [documentation](docs/) folder also contains more resources relevant to the project. -Locked files will appear in a list in the Settings tab. You can see who has locked a file and release file locks after you've pushed your work. +If you're looking for something to work on, check out the [up-for-grabs](https://github.com/github-for-unity/Unity/issues?q=is%3Aopen+is%3Aissue+label%3Aup-for-grabs) label. -![Settings tab screenshot](https://user-images.githubusercontent.com/121322/27644993-d9d325a0-5bd9-11e7-86f5-beee00e9e8b8.png) +## How to use -## More Resources +The [quick guide to GitHub for Unity](docs/using/quick-guide.md) -See [unity.github.com](https://unity.github.com) for more product-oriented -information about GitHub for Unity. +More [in-depth information](docs/readme.md) ## License @@ -188,6 +40,6 @@ The MIT license grant is not for GitHub's trademarks, which include the logo designs. GitHub reserves all trademark and copyright rights in and to all GitHub trademarks. 
GitHub's logos include, for instance, the stylized Invertocat designs that include "logo" in the file title in the following -folder: [IconsAndLogos](https://github.com/github-for-unity/Unity/tree/master/src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos). +folder: [IconsAndLogos](src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos). Copyright 2015 - 2018 GitHub, Inc. diff --git a/appveyor.yml b/appveyor.yml index 3736c7189..bfb3e1b77 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,35 +1,28 @@ -version: '{build}.{branch}' environment: GHFU_KEY: secure: KFcQA1VOCEMGUgy2dxH8G5O7C9DsAtQrnc6LakFpd9BRFtNnt2E8RSadPoJwQ9gztWaLS8vQLdU7cV5Ivt01LOnPI2kU1fQd2SHtKwJFve8ppvK/yZ/luhiXvIdGeEAiXQyuc1WUwuECoJVA6n7/uQKr1Q+eHniMitHuFpyQ7OqnwF6f+4TeBS6D78fKd3QoeP4XDdxCjNWPNmLv7BvWFMm5CuTK9aAWhOy8em/nVIED8qt36uHnncsDn+DH7uunj+VwmhVS+4yhuKHHz5naiUAHHIziZ4wBW6Q8rcf7xYEeISjlfxJ6TXs4Wwp406AO+n3v1DZaxSXwvoxplfopeGyb6imJfbwdTU/MHf2uj9wXobR8UhDarcrugVW+J3bqZyvkg20HfSe80gfQUBlK5OdMAp58dhWkddvJSO5TnzqmLlo/60gZxjheIbjdLaavSKcM4xOALXQlBbJJgVQrB/F6tYf7pRK3BlS8VoakyOGjJRzsSNdssSVrLVW3rwANORbH6Z1ZYvQw2ObP6/EBMceer0+JV4y9zB5q9C68erlr1NtJB0xKUp9/7I5GQj4lJ+pDsaFdsj40SyyD4yazSZf/3VIhZi/rQTJm0Ft0ifTZGSxnNTOVMZ5fsoJmUL0bn75Xt9q59cKYzK041HtEzRSElnBeuTf+Sm/MLLV28P1sonwntMhcYQ5ZPIuGmKa8jAJ0kxPXyT56MPJpwbNrbCOw2t9hXg5QbYv/+0RcoRoJ7P/5OY3M6mj8Emtu8N0SFKD6lfv9KNLFAyBuF6Ml7RyOs5RRogIdEapegTY7jwGH7igibrP0lt6HYshM7hKQrRYm2saokBV9TfgV9qnPyGs5/zyTTUGW4y0LavxiXl/vQIpxwCg4liCf1Dgw4Zrxvh40bziKGc3X06RJcysJ8cskOi6gB8eK8VTbAy6/Ufgv+pyjIns1iJxmdVbl8MrlgPXmOephdRPYZJDiFd4ynw0Slm1mqbzPWHdQ/mtMGxNRcysIOPKzKaKvK7Syor5SNtv4HU097dyjVQyW9krHaX/DSnx++dMDCIZJEDYxFw7LmTvl6AFWU4C3HM7+26cHBuBMBYS1PwcRijG+hwsHiXIomVuVglcxp5HC2eFbtBF9h1g030/tCeIBhZIVdSVO78319CsL1+aVtI5WjeQglH1OcTS42OF1Nb/4EjaN4I/w6yRJU5dmK7Q+rHQ+7NnPT4n5flQV6oe1XNbLen0raDuGos6v+aaoOQ50HlCSMMBJ3liapVXIAQ+Z/XM/cNZZa1TB6/C363Hjrts6Uq3IjKXmomhA33je+Wl6mTZqBucXUJs76p+ZgKubWvfzK/e6tORJggAgFNoa9Y56r3t7J8UdUolt301I1cCVz9CvYMUsWTmjKTR5SpssbbcuRcFUhgkHrhsSq0rBe
f/dfn5VEP8sateiqbpMne5iWO4Wc0Qx2/fJfx2zf9oqO3MihAyYDwPI7JCrmccJY9cHV89YlrytZuh87aBQ8d+T5ELdeG3bbYGYbKJ/yOGxMo8cRSomWUp2809Ea/lgu6WjyY1SdEjh20fwONOTRd/AA3h3XIMU7NY999YOEy1zj3yeP+awozDWQU8GSojA/ceLU5HT0U/RT2XJgkzUFb/u8wanNG6InPqvZy01bLR8JZmqXZl/4XgNpEsbzwPxjzuZJP6O4f2Usq1ZmHUfwlouXLWuHTv5/DPYJ9kO91wjzAH46IsoadmQkRDomHYCDPRCnYoS2zBkmBNukCgCKWsSwD5Msw/tpgIorMi5AAFhIOeWt/7tcKZ5nslbbnmZFtDkOBriPEiOjrAziRqFNAdBjecMrckRQrlkYjkpJG/pXZXYq219/8Sy1/HdNrWTCdq7nc947Fvq41CfumT4c2TqhVc/oflJ9SaxIl2A1Vtbw5LkmQKL08vOitsyZgRupcbqLcSYGo2dG+ks0gK5o2rvNp1nyN3ADh7JFmtD7/og55zlsAj7wP6rLEZ44h+dk7+Sh96WoCPfzSJnchg2vsydTpK6sG3Cp5qjEk0Tps88nX0SPvEPxwEpBL08+XIlg5OVxiTIYI0NSeEjAxwip2ptgaeI4c0yPB5SahMArEw/8YeEflWhiDyjoG4Bw0O1v9fRSSYiFhcYwrkr6yBK81hx6uH6DzqDqtKOxwJ3kKhapfwZXStmeOt4AwiSUHO8TiX1t0i7Jqwl7mRduz3LfmqGCeEsNxnLuhc6MPeEva8LO8ILCEcVz8bHlwUMWqabdZRm9UtbWtZp/u8ffPSBbgNFna2kKFr/F7dmXiv18CpNHOGxb/rSdmIaov1nXJR7XUyKPRO548PHE6iNxuNjWjcuFw1L0IUCNEeVUs7tvNHUTYOXRvfXNm3DbhjFnGix2JVCB2xz6QhDV4Hh6y0/rJl0b2dW25iM5HZdwCBGwgGM+9HyD7r+OBiRn+rd996c81+JsWL4jsa//16uwcbEpsF3tAB7b0by4qHbeZ+Gs3M06Sje4UVpLgKQVHSd/hfo4M70v3APhyz0WFBhLLZyouz0OdazKZ4W+HGBcunAPw/sYdMYZLe4ZmA6B+wxtSzojNKFaCFWoh3S5vLClZTraj7Mhh02PPsY0fmo15ceHBwKjMfGZ0pXt8uiPL29ECUstxSLVnPv6M4uXPJa7k+0lvj7XdB7aJ/LzexPAa/Z1+hsr2sO9An5qPnKM5Tp5zj9Xq2T7WBiDObYLxYZX5ez32jKfSYgv3cpIo5HnhKB3rZL3Alp6iJ2NFsDiB6pIUc2YQ3UU8wiMU90ifA83ORttzRDdLCuH1lYCHPk8rcVqeydgNrI4pRVrdIah3wm6hHc7YjSSnjIOhcl286iVtYgn10RUKxcs//ElgoGm0IkefKRy2WcDDL+10ZifpSWxRu0yrpwlxd0uHCAhrkOEnvaamn+0TSu/6s9VxoUyn9ZJhY7Jgnb6Z9Qxi4C+u2vXf6lOQvzl4AawnD9DW+w2L6hr2njGhvgjj2VLIHM/GIOV/OaYW97AiW0NBuEGDyBiuj8TxIUL7IuVj+QZVfyUzZHHL0c0Hy4jlQ+sh2nFzOAGWVZwEdAvLl9JCCs46iA9DHtBSrHxit7lytyspp7q8TYfE1lA0pIwkx20E3t+4CNdUQAr/IJaZJxhdfKAyW3UipP4LdRbweyYHZYFkoN0gEDMrzE0yB7XFNw5ddm/+o8KIuSUl44UVFcp2j0KPfuXadx7Pz1aa5HKpVUdc5CfJOjqgPJFn/MQU702YdUaV0qD+EHDOiVv313gUHdy9kpieQ3s2LDSh0qBkPdxLAdYXKLP24Mj3V+A2lyHU1WtLrIEVP37eCAFSYPf6Lz6TW4zrEBpHF4nwlE8M+0jQ/oB4lINxnkCa3YKYLFMiZ3dAmqGzVElesgymmB21xvdfrHgB1Z5OtQqYT8PPAw6lluj
Xv6Pj9CqDGGS4U8UeW5GCFi/qyV6+hdg2IUsWtSzkbLJ5n8cfafEYeRBRgzK/B6qlTmoOrRl+bzmjVCJX29P+38KCpu7srnSQ+T0fR6t0OWyHGfC/39iMzATnhpiIXdnngVV9Cypgod5we44C2Rb4Or/nr5mdEidElIIthDiD7GHPNSeMXrdxs+ow76rh42DiY7x0L0SMRWyUEz0seL1JdBCdNn/7LuSn4CVpggqZD8anf9n+IUjrJtqQ+AvaogfuxM65byhGK4iVIijrogfBHb4nGywXxeEKe03JJ8nOWWN2ndyNhMW1dfNGraHvAt7DWL+/tp4qKCA89VFaZjwsqINANF1VVwh96SB6qT4tlKJjaPD3YpawT6Jfs+cg3pMj36FIPzHoNd/r+LwCBZ0WiA5xZiO0DX6WhwTfJVStsz4i9VXElCmWF2dpf5kTEC0T62Y1VCc++M1cTfwX34mdHPvdsm1Vi1qpqz4HTez8ateFukyj1FIN7++eYWoBJBoclhb3y/VUFwepORi84pz1fXUSSl8Fpg2U7NRyj+gcM5v/VAC1FGR4CJVpODIdROF7mCrLTbPzLn8Fv7EJHgHKNeU/sIT13+5V/UJSZPAxWcaUKhRWWuShSVb/1U13LjiWkHvmuH7SVLHbJDO5C5lA589rz4weTMd1OSymPuNB/xj2d2YrJUwqB3olsaxwm8w/bs2ot4GF4HFAdx3l0ESiR8jkBNAvr6vwRcXv+7nfXRpx2Mo5QU2YaunbqZxibmtNCQZBH8ZpQyUZOek4A5qDh6HW2VyJqKXeE8u1fbtOzB9xDYxgTrlVFhCw== -clone_script: -- ps: >- - if(-not $env:appveyor_pull_request_number) { - git lfs clone -q -n --branch=$env:appveyor_repo_branch https://github.com/$env:appveyor_repo_name.git $env:appveyor_build_folder - git checkout -qf $env:appveyor_repo_commit - } else { - git lfs clone -q -n https://github.com/$env:appveyor_repo_name.git $env:appveyor_build_folder - git fetch -q origin +refs/pull/$env:appveyor_pull_request_number/merge: - git lfs fetch origin FETCH_HEAD - git checkout -qf FETCH_HEAD - } - - Set-Location $env:appveyor_build_folder + matrix: + - node_version: '8' install: - ps: >- + $full_build = Test-Path env:GHFU_KEY + + $package = $full_build + git submodule sync git submodule init - $full_build = Test-Path env:GHFU_KEY - if ($full_build) { + $env:BUILD_TYPE="full" $fileContent = "-----BEGIN RSA PRIVATE KEY-----`n" $fileContent += $env:GHFU_KEY.Replace(' ', "`n") $fileContent += "`n-----END RSA PRIVATE KEY-----`n" Set-Content c:\users\appveyor\.ssh\id_rsa $fileContent + Install-Product node $env:node_version } else { + $env:BUILD_TYPE="partial" git submodule deinit script $destdir = Join-Path $env:appveyor_build_folder 'lib' $destfile = Join-Path 
$destdir 'deps.zip' @@ -43,6 +36,24 @@ install: nuget restore GitHub.Unity.sln + Set-Location $env:appveyor_build_folder + + $version = Get-Content "$($env:appveyor_build_folder)\common\SolutionInfo.cs" | %{ $regex = "const string GitHubForUnityVersion = `"([^`"]*)`""; if ($_ -match $regex) { $matches[1] } } + + $env:package_version="$($version).$($env:APPVEYOR_BUILD_NUMBER)" + + Update-AppveyorBuild -Version $env:package_version + + $message = "Building " + + if ($package) { $message += "and packaging "} + + if ($full_build) { $message += "(full build)" } else { $message += "(partial build)" } + + $message += " version " + $env:package_version + " " + + Write-Host $message + assembly_info: patch: false file: common\SolutionInfo.cs @@ -60,10 +71,37 @@ test: categories: except: - DoNotRunOnAppVeyor -artifacts: -- path: unity\PackageProject - type: zip - name: github-for-unity-packageproject -- path: build\*.log -on_failure: - - ps: Get-ChildItem build\*.log | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name } +on_success: +- ps: | + if ($package) { + $rootdir=$env:appveyor_build_folder + Set-Location $rootdir + $sourcedir="$rootdir\unity\PackageProject" + $packagename="github-for-unity-$($env:package_version)" + $packagefile="$rootdir\$($packagename).unitypackage" + $commitfile="$sourcedir\commit" + $zipfile="$rootdir\PackageProject-$($env:package_version).zip" + + # generate mdb files + Write-Output "Generating mdb files" + Get-ChildItem -Recurse "$($sourcedir)\*.pdb" | foreach { $_.fullname.substring(0, $_.fullname.length - $_.extension.length) } | foreach { Write-Output "Generating $($_).mdb"; & 'lib\pdb2mdb.exe' "$($_).dll" } + + # generate unitypackage + Write-Output "Generating $packagefile" + submodules\packaging\unitypackage\run.ps1 -PathToPackage:$sourcedir -OutputFolder:$rootdir -PackageName:$packagename + + # save commit + Add-Content $commitfile $appveyor_repo_commit + + Write-Output "Zipping $sourcedir to $zipfile" + 7z a $zipfile $sourcedir + + 
Write-Output "Uploading $zipfile" + Push-AppveyorArtifact $zipfile -DeploymentName source + Push-AppveyorArtifact $packagefile -DeploymentName package + Push-AppveyorArtifact "$($packagefile).md5" -DeploymentName package + } +on_finish: +- ps: | + Set-Location $env:appveyor_build_folder + Get-ChildItem $env:appveyor_build_folder\build\*.log | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name -DeploymentName logs } diff --git a/common/SolutionInfo.cs b/common/SolutionInfo.cs index 62edf1036..eb95db6a0 100644 --- a/common/SolutionInfo.cs +++ b/common/SolutionInfo.cs @@ -11,7 +11,7 @@ [assembly: AssemblyInformationalVersion(System.AssemblyVersionInformation.Version)] [assembly: ComVisible(false)] [assembly: AssemblyCompany("GitHub, Inc.")] -[assembly: AssemblyCopyright("Copyright GitHub, Inc. 2017-2018")] +[assembly: AssemblyCopyright("Copyright GitHub, Inc. 2016-2019")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -31,9 +31,10 @@ namespace System { internal static class AssemblyVersionInformation { - // this is for the AssemblyVersion and AssemblyVersion attributes, which can't handle alphanumerics - internal const string VersionForAssembly = "1.0.0"; - // Actual real version - internal const string Version = "1.0.0rc5"; + private const string GitHubForUnityVersion = "1.4.0"; + internal const string VersionForAssembly = GitHubForUnityVersion; + + // If this is an alpha, beta or other pre-release, mark it as such as shown below + internal const string Version = GitHubForUnityVersion; // GitHubForUnityVersion + "-beta1" } } diff --git a/common/packaging.targets b/common/packaging.targets index 77f866e20..a5ddd41a5 100644 --- a/common/packaging.targets +++ b/common/packaging.targets @@ -7,8 +7,9 @@ @@ -22,16 +23,16 @@ + Condition="!$([System.String]::Copy('%(Filename)').Contains('deleteme')) and !$([System.String]::Copy('%(Extension)').Contains('xml'))" /> - + - + diff --git a/common/properties.props 
b/common/properties.props index 977f217b3..8316329ed 100644 --- a/common/properties.props +++ b/common/properties.props @@ -3,7 +3,7 @@ Internal - ENABLE_METRICS + ENABLE_METRICS $(BuildDefs);ENABLE_MONO $(SolutionDir)script\lib\ diff --git a/create-octorun-zip.sh b/create-octorun-zip.sh new file mode 100755 index 000000000..4eb568d33 --- /dev/null +++ b/create-octorun-zip.sh @@ -0,0 +1,3 @@ +#!/bin/sh -eu +DIR=$(pwd) +submodules/packaging/octorun/run.sh --path $DIR/octorun --out $DIR/src/GitHub.Api/Resources --source $DIR/src/GitHub.Api/Installer diff --git a/create-unitypackage.sh b/create-unitypackage.sh new file mode 100755 index 000000000..333f7cb90 --- /dev/null +++ b/create-unitypackage.sh @@ -0,0 +1,9 @@ +#!/bin/bash -eu +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +version=$(sed -En 's,.*GitHubForUnityVersion = "(.*)".*,\1,p' common/SolutionInfo.cs) +commitcount=$(git rev-list --count HEAD) +commit=$(git log -n1 --pretty=format:%h) +version="${version}.${commitcount}-${commit}" + +$DIR/submodules/packaging/unitypackage/run.sh --path $DIR/unity/PackageProject --out $DIR --file github-for-unity-$version diff --git a/docs/contributing/how-to-build.md b/docs/contributing/how-to-build.md index db494a224..13ef8c2fa 100644 --- a/docs/contributing/how-to-build.md +++ b/docs/contributing/how-to-build.md @@ -13,7 +13,7 @@ This repository is LFS-enabled. To clone it, you should use a git client that su ### MacOS -- Mono 4.x required. +- [Mono 4.x](https://download.mono-project.com/archive/4.8.1/macos-10-universal/) required. You can install it via brew with `brew tap shana/mono && brew install mono@4.8` - Mono 5.x will not work - `UnityEngine.dll` and `UnityEditor.dll`. - If you've installed Unity in the default location of `/Applications/Unity`, the build will be able to reference these DLLs automatically. 
Otherwise, you'll need to copy these DLLs from `[Unity installation path]/Unity.app/Contents/Managed` into the `lib` directory in order for the build to work @@ -35,12 +35,25 @@ git submodule deinit script ### Important pre-build steps -To be able to authenticate in GitHub for Unity, you'll need to: +The build needs to reference `UnityEngine.dll` and `UnityEditor.dll`. These DLLs are included with Unity. If you've installed Unity in the default location, the build will be able to find them automatically. If not, copy these DLLs from `[your Unity installation path]\Unity\Editor\Data\Managed` into the `lib` directory in order for the build to work. + +#### Developer OAuth app + +Because GitHub for Unity uses OAuth web application flow to interact with the GitHub API and perform actions on behalf of a user, it needs to be bundled with a Client ID and Secret. + +For external contributors, we have bundled a developer OAuth application in the source so that you can complete the sign in flow locally without needing to configure your own application. + +These are listed in `src/GitHub.Api/Application/ApplicationInfo.cs` + +DO NOT TRUST THIS CLIENT ID AND SECRET! THIS IS ONLY FOR TESTING PURPOSES!! + +The limitation with this developer application is that this will not work with GitHub Enterprise. You will see sign-in will fail on the OAuth callback due to the credentials not being present there. + +To provide your own Client ID and Client Secret: - [Register a new developer application](https://github.com/settings/developers) in your profile. - Copy [common/ApplicationInfo_Local.cs-example](../../common/ApplicationInfo_Local.cs-example) to `common/ApplicationInfo_Local.cs` and fill out the clientId/clientSecret fields for your application. -The build needs to reference `UnityEngine.dll` and `UnityEditor.dll`. These DLLs are included with Unity. If you've installed Unity in the default location, the build will be able to find them automatically. 
If not, copy these DLLs from `[your Unity installation path]\Unity\Editor\Data\Managed` into the `lib` directory in order for the build to work. ### Visual Studio @@ -56,13 +69,12 @@ Once you've built the solution for the first time, you can open `src/UnityExtens The build also creates a Unity test project called `GitHubExtension` inside a directory called `github-unity-test` next to your local clone. For instance, if the repository is located at `c:\Projects\Unity` the test project will be at `c:\Projects\github-unity-test\GitHubExtension`. You can use this project to test binary builds of the extension in a clean environment (all needed DLLs will be copied to it every time you build). -Note: some files might be locked by Unity if have one of the build output projects open when you compile from VS or the command line. This is expected and shouldn't cause issues with your builds. +Note: some files might be locked by Unity if you have one of the build output projects open when you compile from VS or the command line. This is expected and shouldn't cause issues with your builds. ## Solution organization The `GitHub.Unity.sln` solution includes several projects: -- dotnet-httpclient35 and octokit: external dependencies for threading and github api support, respectively. These are the submodules.
- packaging: empty projects with build rules that copy DLLs to various locations for testing - Tests: unit and integration test projects - GitHub.Logging: A logging helper library diff --git a/docs/readme.md b/docs/readme.md index ae4be69a9..d73e8e840 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -30,6 +30,8 @@ Details about how the team is organizing and shipping GitHub for Unity: ## Using +[Quick Guide](using/quick-guide.md) + These documents contain more details on how to use the GitHub for Unity plugin: -- **[Installing and Updating the GitHub for Unity package](https://github.com/github-for-unity/Unity/blob/master/docs/using/how-to-install-and-update.md)** -- **[Getting Started with the GitHub for Unity package](https://github.com/github-for-unity/Unity/blob/master/docs/using/getting-started.md)** +- **[Installing and Updating the GitHub for Unity package](using/how-to-install-and-update.md)** +- **[Getting Started with the GitHub for Unity package](using/getting-started.md)** diff --git a/docs/using/authenticating-to-github.md b/docs/using/authenticating-to-github.md new file mode 100644 index 000000000..203a3473d --- /dev/null +++ b/docs/using/authenticating-to-github.md @@ -0,0 +1,37 @@ +# Authenticating to GitHub + +## How to sign in to GitHub + +1. Open the **GitHub** window by going to the top level **Window** menu and selecting **GitHub**, as shown below. + + GitHub menu item in the Window menu + +1. Click the **Sign in** button at the top right of the window. + + GitHub menu item in the Window menu + +1. In the **Authenticate** dialog, enter your username or email and password + + GitHub menu item in the Window menu + + If your account requires Two Factor Authentication, you will be prompted for your auth code. + + GitHub menu item in the Window menu + +You will need to create a GitHub account before you can sign in, if you don't have one already. 
+ +- For more information on creating a GitHub account, see "[Signing up for a new GitHub account](https://help.github.com/articles/signing-up-for-a-new-github-account/)". + +### Personal access tokens + +If the sign in operation above fails, you can manually create a personal access token and use it as your password. + +The scopes for the personal access token are: `user`, `repo`. +- *user* scope: Grants access to the user profile data. We currently use this to display your avatar and check whether your plan lets you publish private repositories. +- *repo* scope: Grants read/write access to code, commit statuses, invitations, collaborators, adding team memberships, and deployment statuses for public and private repositories and organizations. This is needed for all git network operations (push, pull, fetch), and for getting information about the repository you're currently working on. + +***Note:*** *Some older versions of the plugin ask for `gist` and `write:public_key`.* + +For more information on creating personal access tokens, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + +For more information on authenticating with SAML single sign-on, see "[About authentication with SAML single sign-on](https://help.github.com/articles/about-authentication-with-saml-single-sign-on)."
diff --git a/docs/using/getting-started.md b/docs/using/getting-started.md index 11f2b008f..4c5892a96 100644 --- a/docs/using/getting-started.md +++ b/docs/using/getting-started.md @@ -19,8 +19,10 @@ And you should see the GitHub spinner: - History: A history of commits with title, time stamp, and commit author - Branches: A list of local and remote branches with the ability to create new branches, switch branches, or checkout remote branches - Settings: your git configuration (pulled from your local git credentials if they have been previously set), your repository configuration (you can manually put the URL to any remote repository here instead of using the Publish button to publish to GitHub), a list of locked files, your git installation details, and general settings to help us better help you if you get stuck -4. You can -# Connecting to an Existing Repository - -# Connecting to an Existing Repository that already has the GitHub for Unity package +# Cloning an Existing Repository +GitHub for Unity does not have the functionality to clone projects (yet!). +1. Clone the repository (either through command line or with GitHub Desktop https://desktop.github.com/). +2. Open the project in Unity. +3. Install GitHub for Unity if it is not already installed. +4. The GitHub plugin should load with all functionality enabled. \ No newline at end of file diff --git a/docs/using/how-to-install-and-update.md b/docs/using/how-to-install-and-update.md index 5f357cf79..fb9dd93d8 100644 --- a/docs/using/how-to-install-and-update.md +++ b/docs/using/how-to-install-and-update.md @@ -43,4 +43,9 @@ Once you've downloaded the package file, you can quickly install it within Unity screen shot 2018-05-18 at 7 13 34 am # Updating the GitHub for Unity Package -_COMING SOON_ + +- If you are running Unity and wish to update GitHub for Unity (unless explicitly stated), be sure that the files in `x64` and `x86` are not selected. 
+ + ![image](https://user-images.githubusercontent.com/417571/44666907-6e6d5a80-a9e7-11e8-8f97-b3b52250a75d.png) + +- Otherwise, it's best to stop Unity and delete GitHub for Unity from your project. Startup Unity and run the package installer like normal. Allowing it to restore everything. diff --git a/docs/using/images/branches-initial-view.png b/docs/using/images/branches-initial-view.png new file mode 100644 index 000000000..811bcf724 --- /dev/null +++ b/docs/using/images/branches-initial-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:558821e67ea26955434da0485785e9240b6c15c44a3790b642fdcd37382e56a2 +size 59798 diff --git a/docs/using/images/changes-view.png b/docs/using/images/changes-view.png new file mode 100644 index 000000000..23000f8ef --- /dev/null +++ b/docs/using/images/changes-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ac4637a222a06e6ab57f23082b8e8b2f5cb9e35b9b99776c112ddcebb923dec +size 61918 diff --git a/docs/using/images/confirm-pull-changes.png b/docs/using/images/confirm-pull-changes.png new file mode 100644 index 000000000..722bbd639 --- /dev/null +++ b/docs/using/images/confirm-pull-changes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b0e1aa1d8d86cb0264c9ca569a5c8e71b0aba73a2eb6611c06ec47670037d7d +size 21478 diff --git a/docs/using/images/confirm-push-changes.png b/docs/using/images/confirm-push-changes.png new file mode 100644 index 000000000..ff1ca1cd0 --- /dev/null +++ b/docs/using/images/confirm-push-changes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e2e136027b03609f344bfde0c777d8e915544a5e9920f1268e4cc3abc5b1481 +size 18676 diff --git a/docs/using/images/confirm-revert.png b/docs/using/images/confirm-revert.png new file mode 100644 index 000000000..cbf5d1566 --- /dev/null +++ b/docs/using/images/confirm-revert.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:ba990ce12403bfb53fb387ca3f3e2c447b21814ed6cd7e6f88d968463a5253a8 +size 21973 diff --git a/docs/using/images/create-new-branch-view.png b/docs/using/images/create-new-branch-view.png new file mode 100644 index 000000000..34c69d1ea --- /dev/null +++ b/docs/using/images/create-new-branch-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0239a51f611501dc63c828de206f35d412493871f65e712983666e67dbfc231 +size 62046 diff --git a/docs/using/images/delete-dialog.png b/docs/using/images/delete-dialog.png new file mode 100644 index 000000000..574489f7c --- /dev/null +++ b/docs/using/images/delete-dialog.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99d7cd8ef4957614cc844a08a632159157295414dc4675c1394f3209e30ffa58 +size 76315 diff --git a/docs/using/images/github-authenticate.png b/docs/using/images/github-authenticate.png new file mode 100644 index 000000000..188121d97 --- /dev/null +++ b/docs/using/images/github-authenticate.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31ca0f7c4fc5c737a3db2da6eeb3deaa862d27e2cf3d1aa78fa14555b7cac750 +size 5927 diff --git a/docs/using/images/github-menu-item.png b/docs/using/images/github-menu-item.png new file mode 100644 index 000000000..44f3f9458 --- /dev/null +++ b/docs/using/images/github-menu-item.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c423766a70230c245f4bcdb1e33d9e76f267bf789876ecf9050016f9c8671735 +size 19651 diff --git a/docs/using/images/github-sign-in-button.png b/docs/using/images/github-sign-in-button.png new file mode 100644 index 000000000..cb6132c24 --- /dev/null +++ b/docs/using/images/github-sign-in-button.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79bea09964bcf1964d4d1e1fb9712fe9fc67dc8ffe11f936534c9f2be63fd777 +size 9318 diff --git a/docs/using/images/github-two-factor.png b/docs/using/images/github-two-factor.png new file mode 100644 index 
000000000..7a3286616 --- /dev/null +++ b/docs/using/images/github-two-factor.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aba2174f780ea52a7cf7bbfd5144527bb7b29c3ce6a3200b6b6f8c69b2d6fb48 +size 9665 diff --git a/docs/using/images/locked-scene.png b/docs/using/images/locked-scene.png new file mode 100644 index 000000000..7a839141d --- /dev/null +++ b/docs/using/images/locked-scene.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bbc246b18cbd173c19f392fe7ab85bffb8a97a7616ef99e7e37496e307924e49 +size 51983 diff --git a/docs/using/images/locks-view-right-click.png b/docs/using/images/locks-view-right-click.png new file mode 100644 index 000000000..0ee14312c --- /dev/null +++ b/docs/using/images/locks-view-right-click.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:590dae09b3c029e48c3703d6c85a6774b062ebea46f11c2c062d97fedcb436df +size 27251 diff --git a/docs/using/images/locks-view.png b/docs/using/images/locks-view.png new file mode 100644 index 000000000..e8790d74e --- /dev/null +++ b/docs/using/images/locks-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:118f2b2e1bb82b598107eaa73db9a077374ff4f9601e47677eca7296353b1bc3 +size 23274 diff --git a/docs/using/images/name-branch.png b/docs/using/images/name-branch.png new file mode 100644 index 000000000..9926c06db --- /dev/null +++ b/docs/using/images/name-branch.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08232fa5cfd41a41c8091197ac27f433fdfa865c4996388dd0c731ac75176442 +size 59599 diff --git a/docs/using/images/new-branch-created.png b/docs/using/images/new-branch-created.png new file mode 100644 index 000000000..2a37a9aeb --- /dev/null +++ b/docs/using/images/new-branch-created.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8543da247ee23364c15abec87e4e3de3e62b85a470e7d3d090ade18fb321072c +size 64045 diff --git 
a/docs/using/images/post-commit-view.png b/docs/using/images/post-commit-view.png new file mode 100644 index 000000000..81fc7d135 --- /dev/null +++ b/docs/using/images/post-commit-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91e26123236bdd3e692ca7d30ff0d6477311fe739d8008ba19fd543ee3506176 +size 21345 diff --git a/docs/using/images/post-push-history-view.png b/docs/using/images/post-push-history-view.png new file mode 100644 index 000000000..0d2ed767a --- /dev/null +++ b/docs/using/images/post-push-history-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0c2d5ccaddc88d3268e0d57f02229525240ecc71bded951e23171456560268b +size 22485 diff --git a/docs/using/images/pull-view.png b/docs/using/images/pull-view.png new file mode 100644 index 000000000..842ff9660 --- /dev/null +++ b/docs/using/images/pull-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:feb83a46ebdd34ff46db3a38e8829f5adec5b10824bf69a8c56c9ba6fbd603ff +size 25054 diff --git a/docs/using/images/push-view.png b/docs/using/images/push-view.png new file mode 100644 index 000000000..42bef651a --- /dev/null +++ b/docs/using/images/push-view.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab87bae0916af3681b7528891a359c77597fa3ea57f13eb6924f1cbcf2f2ab7d +size 14581 diff --git a/docs/using/images/release-lock.png b/docs/using/images/release-lock.png new file mode 100644 index 000000000..545b81c8a --- /dev/null +++ b/docs/using/images/release-lock.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f6e05a714d270016d1f8ad4e018a895ed2b023fe59bd34a28df8d315c065649 +size 140913 diff --git a/docs/using/images/request-lock.png b/docs/using/images/request-lock.png new file mode 100644 index 000000000..b877e1d8f --- /dev/null +++ b/docs/using/images/request-lock.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:3f25877141bed2dcbdc4324edcbd802b6f216771e80de704f20dc99a3fdc718b +size 136078 diff --git a/docs/using/images/revert-commit.png b/docs/using/images/revert-commit.png new file mode 100644 index 000000000..daad75c8c --- /dev/null +++ b/docs/using/images/revert-commit.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7711f428f3e04ea056c47b5d43109f3941761f4c286360d3616e0598d2d7b75 +size 26119 diff --git a/docs/using/images/revert.png b/docs/using/images/revert.png new file mode 100644 index 000000000..326071a28 --- /dev/null +++ b/docs/using/images/revert.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dea7b36eec8e2d99e1906b6cbc9cd22f52d2db09cc1a0cf34563585720b6649d +size 25141 diff --git a/docs/using/images/success-pull-changes.png b/docs/using/images/success-pull-changes.png new file mode 100644 index 000000000..a246da405 --- /dev/null +++ b/docs/using/images/success-pull-changes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e0407ab83ca311f319ff7feac0dac84b862935e2b50353ff82c067bdadf80e16 +size 16888 diff --git a/docs/using/images/success-push-changes.png b/docs/using/images/success-push-changes.png new file mode 100644 index 000000000..8fba1f506 --- /dev/null +++ b/docs/using/images/success-push-changes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfd290cb1e52b0acb5503642a3aaef164bf343735729325484e25d8b99293d7e +size 13262 diff --git a/docs/using/images/switch-confirmation.png b/docs/using/images/switch-confirmation.png new file mode 100644 index 000000000..92d623d6c --- /dev/null +++ b/docs/using/images/switch-confirmation.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ed80f5cf2fab5fd256dd7cd093ad505c966edb95bdc5a15c3f1ace042bec7f6 +size 73947 diff --git a/docs/using/images/switch-or-delete.png b/docs/using/images/switch-or-delete.png new file mode 100644 index 000000000..deab6badf --- /dev/null +++ 
b/docs/using/images/switch-or-delete.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c0d171b2e8f7e212f2446b416235c6232c57c9b4927cfa41891bffa1940ae9d +size 73828 diff --git a/docs/using/images/switched-branches.png b/docs/using/images/switched-branches.png new file mode 100644 index 000000000..c185d25c5 --- /dev/null +++ b/docs/using/images/switched-branches.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:542806a8dcc012cdfadadaf9cdab3cf4871bbbbee2c8d73834946df4ed249aec +size 64281 diff --git a/docs/using/locking-files.md b/docs/using/locking-files.md new file mode 100644 index 000000000..f90c8e7a3 --- /dev/null +++ b/docs/using/locking-files.md @@ -0,0 +1,30 @@ +# Locking files + +## Request locks + +From the Project tab, right-click on a file to open the context menu and select `Request Lock`. + + +An additional way to lock a file is by selecting it and going to `Assets` -> `Request Lock`. + +## View locks + +After requesting a lock, a lock icon appears in the bottom right-hand corner of the file. + + +A list of all locked files will appear in the **Locks** view in the GitHub tab. + + +## Release locks + +There are three ways to release locks: + +1. From the Project tab, right-click on the locked file to open the context menu and select the option to `Release Lock`. + + +2. From the GitHub tab under the **Locks** view, right-click to open the context menu and select to `Release Lock`. + + +3. Select the file to unlock and go to select the menu option `Assets` -> `Release Lock`. + +Note: There are also two options for how to release a lock on a file. Always choose the `Release Lock` option first. The `Release Lock (forced)` option can be used to remove someone else's lock. 
diff --git a/docs/using/managing-branches.md b/docs/using/managing-branches.md new file mode 100644 index 000000000..13fa9c1bd --- /dev/null +++ b/docs/using/managing-branches.md @@ -0,0 +1,38 @@ +# Managing branches + +Initial **Branches** view + +Post commit view + +## Create branch + +1. From the **Branches** view, click on `master` under local branches to enable the `New Branch` button and be able to create a new branch from master. +2. Click on `New Branch`. +Post commit view + +3. Enter a name for the branch and click `Create`. +Post commit view + +4. The new branch will be created from master. +Post commit view + +## Checkout branch + +1. Right-click on a local branch and select `Switch` or double-click on the branch to switch to it. +Post commit view + +2. A dialog will appear asking `Switch branch to 'branch name'?`. Select `Switch`. +Post commit view + +The branch will be checked out. +Post commit view + +## Delete branches + +1. Click on the branch name to be deleted and the `Delete` button becomes enabled. +2. Right-click on a local branch and select `Delete` or click the `Delete` button above the Local branches list. +Post commit view + +3. A dialog appears asking `Are you sure you want to delete the branch: 'branch name'?`. Select `Delete`. + +The branch will be deleted. 
diff --git a/docs/using/quick-guide.md b/docs/using/quick-guide.md new file mode 100644 index 000000000..2011ee40f --- /dev/null +++ b/docs/using/quick-guide.md @@ -0,0 +1,164 @@ +# Quick Guide + +## More resources + +These documents contain more details on how to use the GitHub for Unity plugin: +- **[Installing and Updating the GitHub for Unity package](https://github.com/github-for-unity/Unity/blob/master/docs/using/how-to-install-and-update.md)** +- **[Getting Started with the GitHub for Unity package](https://github.com/github-for-unity/Unity/blob/master/docs/using/getting-started.md)** +- **[Authenticating to GitHub](https://github.com/github-for-unity/Unity/blob/master/docs/using/authenticating-to-github.md)** +- **[Managing Branches](https://github.com/github-for-unity/Unity/blob/master/docs/using/managing-branches.md)** +- **[Locking Files](https://github.com/github-for-unity/Unity/blob/master/docs/using/locking-files.md)** +- **[Working with Changes](https://github.com/github-for-unity/Unity/blob/master/docs/using/working-with-changes.md)** +- **[Using the Api](https://github.com/github-for-unity/Unity/blob/master/docs/using/using-the-api.md)** + +## Table of Contents + +[Installing GitHub for Unity](#installing-github-for-unity) + +- [Requirements](#requirements) + - [Git on macOS](#git-on-macos) + - [Git on Windows](#git-on-windows) +- [Installation](#installation) +- [Log files](#log-files) + - [Windows](#windows) + - [macOS](#macos) + +[Quick Guide to GitHub for Unity](#quick-guide-to-github-for-unity) + +- [Opening the GitHub window](#opening-the-github-window) +- [Initialize Repository](#initialize-repository) +- [Authentication](#authentication) +- [Publish a new repository](#publish-a-new-repository) +- [Commiting your work - Changes tab](#commiting-your-work---changes-tab) +- [Pushing/pulling your work - History tab](#pushingpulling-your-work---history-tab) +- [Branches tab](#branches-tab) +- [Settings tab](#settings-tab) + +## Installing GitHub 
for Unity + +### Requirements + +- Unity 5.4 or higher + - There's currently a blocker issue opened for 5.3 support, so we know it doesn't run there. Personal edition is fine. +- Git and Git LFS 2.x + +#### Git on macOS + +The current release has limited macOS support. macOS users will need to install the latest [Git](https://git-scm.com/downloads) and [Git LFS](https://git-lfs.github.com/) manually, and make sure these are on the path. You can configure the Git location in the `Settings` tab on the GitHub window. + +The easiest way of installing git and git lfs is to install [Homebrew](https://brew.sh/) and then do `brew install git git-lfs`. + +Make sure a Git user and email address are set in the `~/.gitconfig` file before you initialize a repository for the first time. You can set these values by opening your `~/.gitconfig` file and adding the following section, if it doesn't exist yet: + +``` +[user] + name = Your Name + email = Your Email +``` + +#### Git on Windows + +The GitHub for Unity extension ships with a bundle of Git and Git LFS, to ensure that you have the correct version. These will be installed into `%LOCALAPPDATA%\GitHubUnity` when the extension runs for the first time. + +Make sure a Git user and email address are set in the `%HOME%\.gitconfig` file before you initialize a repository for the first time. You can set these values by opening your `%HOME%\.gitconfig` file and adding the following section, if it doesn't exist yet: + +``` +[user] + name = Your Name + email = Your Email +``` + +Once the extension is installed, you can open a command line with the same Git and Git LFS version that the extension uses by going to `Window` -> `GitHub Command Line` in Unity. + +### Installation + +This extension needs to be installed (and updated) for each Unity project that you want to version control.
+First step is to download the latest package from [the releases page](https://github.com/github-for-unity/Unity/releases); it will be saved as a file with the extension `.unitypackage`. +To install it, open Unity, then open the project you want to version control, and then double click on the downloaded package. +Alternatively, import the package by clicking `Assets`, `Import Package`, `Custom Package`, then select the downloaded package. + +#### Log files + +##### macOS + +The extension log file can be found at `~/Library/Logs/GitHubUnity/github-unity.log` + +##### Windows + +The extension log file can be found at `%LOCALAPPDATA%\GitHubUnity\github-unity.log` + +## I have a problem with GitHub for Unity + +First, please search the [open issues](https://github.com/github-for-unity/Unity/issues?q=is%3Aopen) +and [closed issues](https://github.com/github-for-unity/Unity/issues?q=is%3Aclosed) +to see if your issue hasn't already been reported (it may also be fixed). + +If you can't find an issue that matches what you're seeing, open a [new issue](https://github.com/github-for-unity/Unity/issues/new) +and fill out the template to provide us with enough information to investigate +further. + +## Quick Guide to GitHub for Unity + +### Opening the GitHub window + +You can access the GitHub window by going to `Windows` -> `GitHub`. The window opens by default next to the Inspector window. + +### Initialize Repository + +![Initialize repository screenshot](https://user-images.githubusercontent.com/10103121/37807041-bb4446a6-2e19-11e8-9fff-a431309b8515.png) + +If the current Unity project is not in a Git repository, the GitHub for Unity extension will offer to initialize the repository for you. This will: + +- Initialize a git repository at the Unity project root via `git init` +- Initialize git-lfs via `git lfs install` +- Set up a `.gitignore` file at the Unity project root. 
+- Set up a `.gitattributes` file at the Unity project root with a large list of known binary filetypes (images, audio, etc) that should be tracked by LFS +- Configure the project to serialize meta files as text +- Create an initial commit with the `.gitignore` and `.gitattributes` file. + +### Authentication + +To set up credentials in Git so you can push and pull, you can sign in to GitHub by going to `Window` -> `GitHub` -> `Account` -> `Sign in`. You only have to sign in successfully once, your credentials will remain on the system for all Git operations in Unity and outside of it. If you've already signed in once but the Account dropdown still says `Sign in`, ignore it, it's a bug. + +![Authentication screenshot](https://user-images.githubusercontent.com/121322/27644895-8f22f904-5bd9-11e7-8a93-e6bfe0c24a74.png) + +For more information on Authentication: - **[Authenticating to GitHub](https://github.com/github-for-unity/Unity/blob/master/docs/using/authenticating-to-github.md)** + +### Publish a new repository + +1. Go to [github.com](https://github.com) and create a new empty repository - do not add a license, readme or other files during the creation process. +2. Copy the **https** URL shown in the creation page +3. In Unity, go to `Windows` -> `GitHub` -> `Settings` and paste the url into the `Remote` textbox. +4. Click `Save repository`. +5. Go to the `History` tab and click `Push`. + +### Commiting your work - Changes tab + +You can see which files have been changed and commit them through the `Changes` tab. 
`.meta` files will show up in relation to their files on the tree, so you can select a file for comitting and automatically have their `.meta` + +![Changes tab screenshot](https://user-images.githubusercontent.com/121322/27644933-ab00af72-5bd9-11e7-84c3-edec495f87f5.png) + +For more information on working with changes: - **[Working with Changes](https://github.com/github-for-unity/Unity/blob/master/docs/using/working-with-changes.md#commit-changes)** + +### Pushing/pulling your work - History tab + +The history tab includes a `Push` button to push your work to the server. Make sure you have a remote url configured in the `Settings` tab so that you can push and pull your work. + +To receive updates from the server by clicking on the `Pull` button. You cannot pull if you have local changes, so commit your changes before pulling. + +![History tab screenshot](https://user-images.githubusercontent.com/121322/27644965-c1109bba-5bd9-11e7-9257-4fa38f5c67d1.png) + + +For more information on working with changes: - **[Working with Changes](https://github.com/github-for-unity/Unity/blob/master/docs/using/working-with-changes.md#pulling-changes)** + +### Branches tab + +![Branches tab screenshot](https://user-images.githubusercontent.com/121322/27644978-cd3c5622-5bd9-11e7-9dcb-6ae5d5c7dc8a.png) + +### Settings tab + +You can configure your user data in the `Settings` tab, along with the path to the Git installation. + +Locked files will appear in a list in the Settings tab. You can see who has locked a file and release file locks after you've pushed your work. 
+ +![Settings tab screenshot](https://user-images.githubusercontent.com/121322/27644993-d9d325a0-5bd9-11e7-86f5-beee00e9e8b8.png) diff --git a/docs/using/using-the-api.md b/docs/using/using-the-api.md new file mode 100644 index 000000000..ab75cab55 --- /dev/null +++ b/docs/using/using-the-api.md @@ -0,0 +1,75 @@ +# Using the API + +GitHub for Unity provides access to a git client to help users create their own tools to assist in their workflow. + +Users can separate the user interface from the API by removing `GitHub.Unity.dll`. All other libraries are required by the API. + +## Creating an instance of `GitClient` +```cs +var defaultEnvironment = new DefaultEnvironment(); +defaultEnvironment.Initialize(null, NPath.Default, NPath.Default, NPath.Default, Application.dataPath.ToNPath()); + +var processEnvironment = new ProcessEnvironment(defaultEnvironment); +var processManager = new ProcessManager(defaultEnvironment, processEnvironment, TaskManager.Instance.Token); + +var gitClient = new GitClient(defaultEnvironment, processManager, TaskManager.Instance.Token); +``` + +## Full Example +This example creates a window that has a single button which commits all changes. 
+```cs +using System; +using System.Globalization; +using GitHub.Unity; +using UnityEditor; +using UnityEngine; + +public class CustomGitEditor : EditorWindow +{ + [MenuItem("Window/Custom Git")] + public static void ShowWindow() + { + EditorWindow.GetWindow(typeof(CustomGitEditor)); + } + + [NonSerialized] private GitClient gitClient; + + public void OnEnable() + { + InitGitClient(); + } + + private void InitGitClient() + { + if (gitClient != null) return; + + Debug.Log("Init GitClient"); + + var defaultEnvironment = new DefaultEnvironment(); + defaultEnvironment.Initialize(null, NPath.Default, NPath.Default, + NPath.Default, Application.dataPath.ToNPath()); + + var processEnvironment = new ProcessEnvironment(defaultEnvironment); + var processManager = new ProcessManager(defaultEnvironment, processEnvironment, TaskManager.Instance.Token); + + gitClient = new GitClient(defaultEnvironment, processManager, TaskManager.Instance.Token); + } + + void OnGUI() + { + GUILayout.Label("Custom Git Window", EditorStyles.boldLabel); + + if (GUILayout.Button("Commit Stuff")) + { + var message = DateTime.Now.ToString(CultureInfo.InvariantCulture); + var body = string.Empty; + + gitClient.AddAll() + .Then(gitClient.Commit(message, body)) + .Start(); + } + } +} +``` + + diff --git a/docs/using/working-with-changes.md b/docs/using/working-with-changes.md new file mode 100644 index 000000000..228e4bd3f --- /dev/null +++ b/docs/using/working-with-changes.md @@ -0,0 +1,53 @@ +# Working with changes + +## Commit changes + +All changes made to a repository will show up under the **Changes** view. + +1. Select the changes to be committed. Can choose the All/None options, or select directories or files individually. +2. Enter a Commit summary which describes the purpose of the commit. An optional Commit description can also be entered. +3. Click the button `Commit to [branch name]`. +Changes view + +The commit will now be shown under the **History** view. 
On the top bar the button `Push (1)` indicates that there is 1 commit to push. + +Post commit view + +## Push changes to GitHub + +1. Click `Push` once ready to push a commit to GitHub. +Push view + +2. A dialog will appear asking `Would you like to push changes to remote 'branch name'?` Select `Push`. +Confirm push dialog + +3. Another dialog will appear when the push to GitHub is complete saying `Branch pushed`. Select `ok`. +Branch pushed + +## Revert changes + +1. From the **History** view, right-click on a commit in the commit list. A `Revert` option will appear. +2. Click `Revert`. +Revert + +3. A dialog will appear asking `Are you sure you want to revert the following commit: "commit message"?`. Select `Revert`. +Confirm revert dialog + +4. A new commit appears titled `Revert "commit summary"` and the view indicates that there is 1 commit to push. +Revert commit + +5. Follow the steps to push the reverted commit to GitHub. + +## Pulling changes + +1. Click the `Fetch` button to get all the latest branches and tags for the repository. The `Pull` button will then show the number of commits to pull from GitHub. +2. Click `Pull`. +Pull changes + +3. A dialog will appear asking `Would you like to pull changes from remote 'branch name'?`. Select `Pull`. +Confirm pull changes dialog + +4. Another dialog appears saying `Local branch is up to date with 'branch name'`. Select `ok`. 
+Changes pulled + + diff --git a/lib/ICSharpCode.NRefactory.dll b/lib/ICSharpCode.NRefactory.dll deleted file mode 100644 index f11688c6c..000000000 --- a/lib/ICSharpCode.NRefactory.dll +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2e28e35172498877c7e4a33253b0c5df172bce5655b2ab933a922d6777ae5e6d -size 528384 diff --git a/lib/ICSharpCode.SharpZipLib.dll b/lib/ICSharpCode.SharpZipLib.dll deleted file mode 100644 index 108abfd2a..000000000 --- a/lib/ICSharpCode.SharpZipLib.dll +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fbace48694fbfff69ff99ff7aefbde67965f7723092df9c13ff537d2a319f410 -size 192000 diff --git a/lib/pdb2mdb.exe b/lib/pdb2mdb.exe new file mode 100644 index 000000000..72547bf3c --- /dev/null +++ b/lib/pdb2mdb.exe @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a72425f98bdb5923704946c4010485d471ce9e35611264f394637fb185424619 +size 369664 diff --git a/nuget.config b/nuget.config index 1a5bd9cf3..bad252e01 100644 --- a/nuget.config +++ b/nuget.config @@ -1,7 +1,7 @@ - + diff --git a/octorun/bin/octorun-meta b/octorun/bin/octorun-meta new file mode 100644 index 000000000..1d2d3a7c9 --- /dev/null +++ b/octorun/bin/octorun-meta @@ -0,0 +1,3 @@ +#!/usr/bin/env node + +require('../src/bin/app-meta.js'); diff --git a/octorun/bin/octorun-token b/octorun/bin/octorun-token new file mode 100644 index 000000000..3ea7b1500 --- /dev/null +++ b/octorun/bin/octorun-token @@ -0,0 +1,3 @@ +#!/usr/bin/env node + +require('../src/bin/app-token.js'); diff --git a/octorun/src/api.js b/octorun/src/api.js index fcaf96c50..f7a672e39 100644 --- a/octorun/src/api.js +++ b/octorun/src/api.js @@ -1,17 +1,17 @@ var config = require("./configuration"); var octokitWrapper = require("./octokit"); -function ApiWrapper() { - this.octokit = octokitWrapper.createOctokit(); - - if (!config.user || !config.token) { - throw "user and/or token missing"; - } - +function ApiWrapper(host) { if 
(!config.appName) { throw "appName missing"; } + if (!config.token) { + throw "token missing"; + } + + this.octokit = octokitWrapper.createOctokit(config.appName, host); + this.octokit.authenticate({ type: "oauth", token: config.token diff --git a/octorun/src/authentication.js b/octorun/src/authentication.js index 43a2de5bb..4147522cd 100644 --- a/octorun/src/authentication.js +++ b/octorun/src/authentication.js @@ -1,12 +1,11 @@ -var endOfLine = require('os').EOL; var config = require("./configuration"); var octokitWrapper = require("./octokit"); var twoFactorRegex = new RegExp("must specify two-factor authentication otp code", "gi"); -var scopes = ["user", "repo", "gist", "write:public_key"]; +var scopes = ["user", "repo"]; -var handleAuthentication = function (username, password, onSuccess, onFailure, twoFactor) { +var handleAuthentication = function (username, password, onSuccess, onFailure, twoFactor, host) { if (!config.clientId || !config.clientSecret) { throw "clientId and/or clientSecret missing"; } @@ -15,7 +14,7 @@ var handleAuthentication = function (username, password, onSuccess, onFailure, t throw "appName missing"; } - var octokit = octokitWrapper.createOctokit(); + var octokit = octokitWrapper.createOctokit(config.appName, host); octokit.authenticate({ type: "basic", @@ -27,7 +26,6 @@ var handleAuthentication = function (username, password, onSuccess, onFailure, t if (twoFactor) { headers = { "X-GitHub-OTP": twoFactor, - "user-agent": config.appName }; } diff --git a/octorun/src/bin/app-login.js b/octorun/src/bin/app-login.js index f3484c31b..4577bac81 100644 --- a/octorun/src/bin/app-login.js +++ b/octorun/src/bin/app-login.js @@ -1,12 +1,12 @@ var commander = require("commander"); var package = require('../../package.json'); var authentication = require('../authentication'); -var endOfLine = require('os').EOL; var output = require('../output'); commander .version(package.version) .option('-t, --twoFactor') + .option('-h, --host ') 
.parse(process.argv); var handleAuthentication = function (username, password, twoFactor) { @@ -19,7 +19,7 @@ var handleAuthentication = function (username, password, twoFactor) { } }, function (error) { output.error(error); - }, twoFactor); + }, twoFactor, commander.host); } var encoding = 'utf-8'; diff --git a/octorun/src/bin/app-meta.js b/octorun/src/bin/app-meta.js new file mode 100644 index 000000000..ecbc36e66 --- /dev/null +++ b/octorun/src/bin/app-meta.js @@ -0,0 +1,52 @@ +var commander = require('commander'); +var package = require('../../package.json'); +var output = require('../output'); + +commander + .version(package.version) + .option('-h, --host ') + .parse(process.argv); + +var host = commander.host; +var port = 443; +var scheme = 'https'; + +if (host) { + var https = require(scheme); + var options = { + protocol: scheme + ':', + hostname: host, + port: port, + path: '/api/v3/meta', + method: 'GET', + headers: { + 'Content-Type': 'application/json' + } + }; + + var req = https.request(options, function (res) { + var success = res.statusCode == 200; + + if(!success) { + output.error(res.statusCode); + } else { + res.on('data', function (d) { + output.custom("success", d, true); + }); + + res.on('end', function (d) { + process.exit(); + }); + } + }); + + req.on('error', function (error) { + output.error(error); + }); + + req.end(); +} +else { + commander.help(); + process.exit(-1); +} \ No newline at end of file diff --git a/octorun/src/bin/app-organizations.js b/octorun/src/bin/app-organizations.js index 480289aa1..1a67e181f 100644 --- a/octorun/src/bin/app-organizations.js +++ b/octorun/src/bin/app-organizations.js @@ -1,16 +1,15 @@ var commander = require("commander"); var package = require('../../package.json'); var ApiWrapper = require('../api'); -var endOfLine = require('os').EOL; var output = require('../output'); commander .version(package.version) + .option('-h, --host ') .parse(process.argv); try { - - var apiWrapper = new ApiWrapper(); + 
var apiWrapper = new ApiWrapper(commander.host); apiWrapper.getOrgs(function (error, result) { if (error) { output.error(error); diff --git a/octorun/src/bin/app-publish.js b/octorun/src/bin/app-publish.js index 5fe602390..62305763e 100644 --- a/octorun/src/bin/app-publish.js +++ b/octorun/src/bin/app-publish.js @@ -1,7 +1,6 @@ var commander = require("commander"); var package = require('../../package.json') var ApiWrapper = require('../api') -var endOfLine = require('os').EOL; var output = require('../output'); commander @@ -10,6 +9,7 @@ commander .option('-d, --description ') .option('-o, --organization ') .option('-p, --private') + .option('-h, --host ') .parse(process.argv); if(!commander.repository) @@ -24,7 +24,7 @@ if (commander.private) { } try { - var apiWrapper = new ApiWrapper(); + var apiWrapper = new ApiWrapper(commander.host); apiWrapper.publish(commander.repository, commander.description, private, commander.organization, function (error, result) { diff --git a/octorun/src/bin/app-token.js b/octorun/src/bin/app-token.js new file mode 100644 index 000000000..5811be43e --- /dev/null +++ b/octorun/src/bin/app-token.js @@ -0,0 +1,66 @@ + +var commander = require('commander'); +var package = require('../../package.json'); +var output = require('../output'); +var config = require("../configuration"); +var querystring = require('querystring'); + +commander + .version(package.version) + .option('-h, --host ') + .parse(process.argv); + +var host = commander.host; +var port = 443; +var scheme = 'https'; + +var valid = host && config.clientId && config.clientSecret && config.token; +if (valid) { + var https = require(scheme); + + var postData = querystring.stringify({ + client_id: config.clientId, + client_secret: config.clientSecret, + code: config.token + }); + + var options = { + protocol: scheme + ':', + hostname: host, + port: port, + path: '/login/oauth/access_token', + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + 
'Content-Length': postData.length + } + }; + + var req = https.request(options, function (res) { + var success = res.statusCode == 200; + + if(!success) { + output.error(res.statusCode); + } else { + res.on('data', function (d) { + output.custom("success", d, true); + }); + + res.on('end', function (d) { + process.exit(); + }); + } + }); + + req.on('error', function (error) { + output.error(error); + }); + + req.write(postData); + + req.end(); +} +else { + commander.help(); + process.exit(-1); +} \ No newline at end of file diff --git a/octorun/src/bin/app-usage.js b/octorun/src/bin/app-usage.js index 9f6811d15..0fb32b691 100644 --- a/octorun/src/bin/app-usage.js +++ b/octorun/src/bin/app-usage.js @@ -1,9 +1,7 @@ -var commander = require("commander"); -var package = require('../../package.json') -var config = require("../configuration"); -var endOfLine = require('os').EOL; +var commander = require('commander'); +var package = require('../../package.json'); +var config = require('../configuration'); var fs = require('fs'); -var util = require('util'); var output = require('../output'); commander @@ -46,9 +44,6 @@ if (fileContents && host) { 'Content-Type': 'application/json' } }; - if (config.token) { - options.headers['Authorization'] = 'token ' + config.token; - } var req = https.request(options, function (res) { var success = res.statusCode == 200; diff --git a/octorun/src/bin/app-validate.js b/octorun/src/bin/app-validate.js index 8ba643021..294fbcbdc 100644 --- a/octorun/src/bin/app-validate.js +++ b/octorun/src/bin/app-validate.js @@ -1,15 +1,15 @@ var commander = require("commander"); var package = require('../../package.json'); -var endOfLine = require('os').EOL; var ApiWrapper = require('../api'); var output = require('../output'); commander .version(package.version) + .option('-h, --host ') .parse(process.argv); try { - var apiWrapper = new ApiWrapper(); + var apiWrapper = new ApiWrapper(commander.host); apiWrapper.verifyUser(function (error, result) { if 
(error) { diff --git a/octorun/src/bin/app.js b/octorun/src/bin/app.js index e40d738b2..095292965 100644 --- a/octorun/src/bin/app.js +++ b/octorun/src/bin/app.js @@ -9,4 +9,6 @@ commander .command('organizations', 'Get Organizations') .command('publish', 'Publish') .command('usage', 'Usage') + .command('token', 'Create OAuth Token') + .command('meta', 'Get Server Meta Data') .parse(process.argv); \ No newline at end of file diff --git a/octorun/src/configuration.js b/octorun/src/configuration.js index f9462acde..0fe3906fd 100644 --- a/octorun/src/configuration.js +++ b/octorun/src/configuration.js @@ -3,13 +3,11 @@ require("dotenv").config({silent: true}); var clientId = process.env.OCTOKIT_CLIENT_ID; var clientSecret = process.env.OCTOKIT_CLIENT_SECRET; var appName = process.env.OCTOKIT_USER_AGENT; -var user = process.env.OCTORUN_USER; var token = process.env.OCTORUN_TOKEN; module.exports = { clientId: clientId, clientSecret: clientSecret, appName: appName, - user: user, token: token }; \ No newline at end of file diff --git a/octorun/src/octokit.js b/octorun/src/octokit.js index 1cf90b1ac..b0ab0a42f 100644 --- a/octorun/src/octokit.js +++ b/octorun/src/octokit.js @@ -1,19 +1,20 @@ var Octokit = require('octokit-rest-for-node-v0.12'); -var createOctokit = function () { - return Octokit({ +var createOctokit = function (appName, host) { + var octokitConfiguration = { timeout: 0, requestMedia: 'application/vnd.github.v3+json', headers: { - 'user-agent': 'octokit/rest.js v1.2.3' + 'user-agent': appName } + }; - // change for custom GitHub Enterprise URL - //host: 'api.github.com', - //pathPrefix: '', - //protocol: 'https', - //port: 443 - }); + if (host) { + octokitConfiguration.host = host; + octokitConfiguration.pathPrefix = 'api/v3'; + } + + return Octokit(octokitConfiguration); }; module.exports = { createOctokit: createOctokit }; \ No newline at end of file diff --git a/octorun/version b/octorun/version index b2dfd9b3b..998379c47 100644 --- a/octorun/version +++ 
b/octorun/version @@ -1 +1 @@ -b4b80eb4ac \ No newline at end of file +902910f48 \ No newline at end of file diff --git a/package.cmd b/package.cmd index e25ddbd41..a47264d8f 100644 --- a/package.cmd +++ b/package.cmd @@ -47,7 +47,7 @@ if not exist "%Unity%" ( del /Q unity\PackageProject\Assets\Plugins\GitHub\Editor\*.pdb.meta del /Q unity\PackageProject\Assets\Plugins\GitHub\Editor\*.xml - for /f tokens^=^2^ usebackq^ delims^=^" %%G in (`find "const string Version" common\SolutionInfo.cs`) do call :Package %%G + for /f tokens^=^2^ usebackq^ delims^=^" %%G in (`find "const string GitHubForUnityVersion" common\SolutionInfo.cs`) do call :Package %%G goto End diff --git a/package.sh b/package.sh index ca09bd5e5..8dc3c93dc 100755 --- a/package.sh +++ b/package.sh @@ -58,7 +58,7 @@ rm -f unity/PackageProject/Assets/Plugins/GitHub/Editor/*.pdb rm -f unity/PackageProject/Assets/Plugins/GitHub/Editor/*.pdb.meta rm -f unity/PackageProject/Assets/Plugins/GitHub/Editor/*.xml -Version=`sed -En 's,.*Version = "(.*)".*,\1,p' common/SolutionInfo.cs` +Version=`sed -En 's,.*GitHubForUnityVersion = "(.*)".*,\1,p' common/SolutionInfo.cs` commitcount=`git rev-list --count HEAD` commit=`git log -n1 --pretty=format:%h` Version="${Version}.${commitcount}-${commit}" diff --git a/script b/script index 259dba7e8..d373977da 160000 --- a/script +++ b/script @@ -1 +1 @@ -Subproject commit 259dba7e8375a96a935b51e2169fe029cd70c039 +Subproject commit d373977da73bdf7f9170e778638c80e5b49ca3b3 diff --git a/src/.gitignore b/src/.gitignore index f8cebfc72..a7abc4e4a 100644 --- a/src/.gitignore +++ b/src/.gitignore @@ -312,4 +312,5 @@ sysinfo.txt # Builds *.apk *.unitypackage -UnityExtension/**/manifest.json \ No newline at end of file +UnityExtension/**/manifest.json +tests/IntegrationTests/IOTestsRepo/ \ No newline at end of file diff --git a/src/GitHub.Api/Application/ApiClient.cs b/src/GitHub.Api/Application/ApiClient.cs index 73055228b..97a26797a 100644 --- 
a/src/GitHub.Api/Application/ApiClient.cs +++ b/src/GitHub.Api/Application/ApiClient.cs @@ -1,38 +1,48 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Net; using GitHub.Logging; using System.Runtime.Serialization; using System.Text; +using System.Text.RegularExpressions; +using System.Threading; +using GitHub.Unity.Json; namespace GitHub.Unity { - class ApiClient : IApiClient + public class ApiClient : IApiClient { private static readonly ILogging logger = LogHelper.GetLogger(); + private static readonly Regex httpStatusErrorRegex = new Regex("(?<=[a-z])([A-Z])", RegexOptions.Compiled); + private static readonly Regex accessTokenRegex = new Regex("access_token=(.*?)&", RegexOptions.Compiled); + public HostAddress HostAddress { get; } - public UriString OriginalUrl { get; } private readonly IKeychain keychain; private readonly IProcessManager processManager; private readonly ITaskManager taskManager; - private readonly NPath nodeJsExecutablePath; - private readonly NPath octorunScriptPath; private readonly ILoginManager loginManager; + private readonly IEnvironment environment; + private IKeychainAdapter keychainAdapter; + private Connection connection; - public ApiClient(UriString hostUrl, IKeychain keychain, IProcessManager processManager, ITaskManager taskManager, NPath nodeJsExecutablePath, NPath octorunScriptPath) + public ApiClient(IKeychain keychain, IProcessManager processManager, ITaskManager taskManager, + IEnvironment environment, UriString host = null) { - Guard.ArgumentNotNull(hostUrl, nameof(hostUrl)); Guard.ArgumentNotNull(keychain, nameof(keychain)); - HostAddress = HostAddress.Create(hostUrl); - OriginalUrl = hostUrl; + host = host == null + ? 
UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri) + : new UriString(host.ToRepositoryUri().GetComponents(UriComponents.SchemeAndServer, UriFormat.SafeUnescaped)); + + HostAddress = HostAddress.Create(host); + this.keychain = keychain; this.processManager = processManager; this.taskManager = taskManager; - this.nodeJsExecutablePath = nodeJsExecutablePath; - this.octorunScriptPath = octorunScriptPath; - loginManager = new LoginManager(keychain, processManager, taskManager, nodeJsExecutablePath, octorunScriptPath); + this.environment = environment; + loginManager = new LoginManager(keychain, processManager, taskManager, environment); } public ITask Logout(UriString host) @@ -40,16 +50,24 @@ public ITask Logout(UriString host) return loginManager.Logout(host); } - public void CreateRepository(string name, string description, bool isPrivate, Action callback, string organization = null) + public void CreateRepository(string name, string description, bool isPrivate, + Action callback, string organization = null) { Guard.ArgumentNotNull(callback, "callback"); new FuncTask(taskManager.Token, () => { - var user = GetCurrentUser(); - var keychainAdapter = keychain.Connect(OriginalUrl); + EnsureValidCredentials(); + + var command = new StringBuilder("publish"); + + if (!HostAddress.IsGitHubDotCom()) + { + command.Append(" -h "); + command.Append(HostAddress.ApiUri.Host); + } - var command = new StringBuilder("publish -r \""); + command.Append(" -r \""); command.Append(name); command.Append("\""); @@ -72,8 +90,9 @@ public void CreateRepository(string name, string description, bool isPrivate, Ac command.Append(" -p"); } - var octorunTask = new OctorunTask(taskManager.Token, nodeJsExecutablePath, octorunScriptPath, command.ToString(), - user: user.Login, userToken: keychainAdapter.Credential.Token) + var adapter = EnsureKeychainAdapter(); + + var octorunTask = new OctorunTask(taskManager.Token, environment, command.ToString(), adapter.Credential.Token) 
.Configure(processManager); var ret = octorunTask.RunSynchronously(); @@ -101,16 +120,85 @@ public void CreateRepository(string name, string description, bool isPrivate, Ac .Start(); } + public void GetEnterpriseServerMeta(Action onSuccess, Action onError = null) + { + Guard.ArgumentNotNull(onSuccess, nameof(onSuccess)); + new FuncTask(taskManager.Token, () => + { + var octorunTask = new OctorunTask(taskManager.Token, environment, "meta -h " + HostAddress.ApiUri.Host) + .Configure(processManager); + + var ret = octorunTask.RunSynchronously(); + if (ret.IsSuccess) + { + var deserializeObject = SimpleJson.DeserializeObject>(ret.Output[0]); + + return new GitHubHostMeta + { + InstalledVersion = (string)deserializeObject["installed_version"], + GithubServicesSha = (string)deserializeObject["github_services_sha"], + VerifiablePasswordAuthentication = (bool)deserializeObject["verifiable_password_authentication"] + }; + } + + var message = ret.GetApiErrorMessage(); + + logger.Trace("Message: {0}", message); + + if (message != null) + { + if (message.Contains("ETIMEDOUT", StringComparison.InvariantCulture)) + { + message = "Connection timed out."; + } + else if (message.Contains("ECONNREFUSED", StringComparison.InvariantCulture)) + { + message = "Connection refused."; + } + else if (message.Contains("ENOTFOUND", StringComparison.InvariantCulture)) + { + message = "Address not found."; + } + else + { + int httpStatusCode; + if (int.TryParse(message, out httpStatusCode)) + { + var httpStatus = ((HttpStatusCode)httpStatusCode).ToString(); + message = httpStatusErrorRegex.Replace(httpStatus, " $1"); + } + } + } + else + { + message = "Error getting server meta"; + } + + throw new ApiClientException(message); + }) + .FinallyInUI((success, ex, meta) => + { + if (success) + onSuccess(meta); + else + { + logger.Error(ex, "Error getting server meta"); + onError?.Invoke(ex); + } + }) + .Start(); + } + public void GetOrganizations(Action onSuccess, Action onError = null) { 
Guard.ArgumentNotNull(onSuccess, nameof(onSuccess)); new FuncTask(taskManager.Token, () => { - var user = GetCurrentUser(); - var keychainAdapter = keychain.Connect(OriginalUrl); + var adapter = EnsureKeychainAdapter(); - var octorunTask = new OctorunTask(taskManager.Token, nodeJsExecutablePath, octorunScriptPath, "organizations", - user: user.Login, userToken: keychainAdapter.Credential.Token) + var command = HostAddress.IsGitHubDotCom() ? "organizations" : "organizations -h " + HostAddress.ApiUri.Host; + var octorunTask = new OctorunTask(taskManager.Token, environment, + command, adapter.Credential.Token) .Configure(processManager); var ret = octorunTask.RunSynchronously(); @@ -143,6 +231,17 @@ public void GetOrganizations(Action onSuccess, Action .Start(); } + private IKeychainAdapter EnsureKeychainAdapter() + { + var adapter = KeychainAdapter; + if (adapter.Credential == null) + { + throw new ApiClientException("No Credentials found"); + } + + return adapter; + } + public void GetCurrentUser(Action onSuccess, Action onError = null) { Guard.ArgumentNotNull(onSuccess, nameof(onSuccess)); @@ -157,13 +256,72 @@ public void GetCurrentUser(Action onSuccess, Action onErr .Start(); } + public void LoginWithToken(string token, Action result) + { + Guard.ArgumentNotNull(token, "token"); + Guard.ArgumentNotNull(result, "result"); + + new FuncTask(taskManager.Token, + () => loginManager.LoginWithToken(HostAddress.WebUri.Host, token)) + .FinallyInUI((success, ex, res) => + { + if (!success) + { + logger.Warning(ex); + result(false); + return; + } + + result(res); + }) + .Start(); + } + + public void CreateOAuthToken(string code, Action result) + { + var command = "token -h " + HostAddress.WebUri.Host; + var octorunTask = new OctorunTask(taskManager.Token, environment, command, code) + .Configure(processManager); + + octorunTask + .Then((b, octorunResult) => + { + if (b && octorunResult.IsSuccess) + { + var first = octorunResult.Output.FirstOrDefault(); + if (first == null) 
+ { + result(false, "Error validating token."); + return; + } + + var match = accessTokenRegex.Match(first); + if (match.Success) + { + var token = match.Groups[1].Value; + LoginWithToken(token, b1 => result(b1, "Error validating token.")); + } + else + { + result(false, octorunResult.Output.FirstOrDefault()); + } + } + else + { + result(false, octorunResult.Output.FirstOrDefault()); + } + }) + .Catch(exception => result(false, exception.ToString())) + .Start(); + } + public void Login(string username, string password, Action need2faCode, Action result) { Guard.ArgumentNotNull(need2faCode, "need2faCode"); Guard.ArgumentNotNull(result, "result"); new FuncTask(taskManager.Token, - () => loginManager.Login(OriginalUrl, username, password)) + () => loginManager.Login(HostAddress.WebUri.Host, username, password)) .FinallyInUI((success, ex, res) => { if (!success) @@ -206,51 +364,76 @@ public void ContinueLogin(LoginResult loginResult, string code) .Start(); } - private GitHubUser GetCurrentUser() + public void EnsureValidCredentials() { - //TODO: ONE_USER_LOGIN This assumes we only support one login - var keychainConnection = keychain.Connections.FirstOrDefault(); - if (keychainConnection == null) - throw new KeychainEmptyException(); - - var keychainAdapter = GetValidatedKeychainAdapter(keychainConnection); + GetCurrentUser(); + } + public GitHubUser GetCurrentUser() + { // we can't trust that the system keychain has the username filled out correctly. 
// if it doesn't, we need to grab the username from the server and check it // unfortunately this means that things will be slower when the keychain doesn't have all the info - if (keychainConnection.User == null || keychainAdapter.Credential.Username != keychainConnection.Username) + if (Connection.User == null || KeychainAdapter.Credential.Username != Connection.Username) { - keychainConnection.User = GetValidatedGitHubUser(keychainConnection, keychainAdapter); + Connection.User = GetValidatedGitHubUser(); } - return keychainConnection.User; + + return Connection.User; } - private IKeychainAdapter GetValidatedKeychainAdapter(Connection keychainConnection) + private Connection Connection { - var keychainAdapter = keychain.Load(keychainConnection.Host); - if (keychainAdapter == null) - throw new KeychainEmptyException(); - - if (string.IsNullOrEmpty(keychainAdapter.Credential?.Username)) + get { - logger.Warning("LoadKeychainInternal: Username is empty"); - throw new TokenUsernameMismatchException(keychainConnection.Username); + if (connection == null) + { + connection = keychain.Connections.FirstOrDefault(x => x.Host.ToUriString().Host == HostAddress.WebUri.Host); + } + + return connection; } + } - if (keychainAdapter.Credential.Username != keychainConnection.Username) + private IKeychainAdapter KeychainAdapter + { + get { - logger.Warning("LoadKeychainInternal: Token username does not match"); - } + if (keychainAdapter == null) + { + if (Connection == null) + throw new KeychainEmptyException(); - return keychainAdapter; + var loadedKeychainAdapter = keychain.LoadFromSystem(Connection.Host); + if (loadedKeychainAdapter == null) + throw new KeychainEmptyException(); + + if (string.IsNullOrEmpty(loadedKeychainAdapter.Credential?.Username)) + { + logger.Warning("LoadKeychainInternal: Username is empty"); + throw new TokenUsernameMismatchException(connection.Username); + } + + if (loadedKeychainAdapter.Credential.Username != connection.Username) + { + 
logger.Warning("LoadKeychainInternal: Token username does not match"); + } + + keychainAdapter = loadedKeychainAdapter; + } + + return keychainAdapter; + } } - private GitHubUser GetValidatedGitHubUser(Connection keychainConnection, IKeychainAdapter keychainAdapter) + private GitHubUser GetValidatedGitHubUser() { try { - var octorunTask = new OctorunTask(taskManager.Token, nodeJsExecutablePath, octorunScriptPath, "validate", - user: keychainConnection.Username, userToken: keychainAdapter.Credential.Token) + var adapter = EnsureKeychainAdapter(); + + var command = HostAddress.IsGitHubDotCom() ? "validate" : "validate -h " + HostAddress.ApiUri.Host; + var octorunTask = new OctorunTask(taskManager.Token, environment, command, adapter.Credential.Token) .Configure(processManager); var ret = octorunTask.RunSynchronously(); @@ -258,10 +441,10 @@ private GitHubUser GetValidatedGitHubUser(Connection keychainConnection, IKeycha { var login = ret.Output[1]; - if (login != keychainConnection.Username) + if (!string.Equals(login, Connection.Username, StringComparison.InvariantCultureIgnoreCase)) { logger.Trace("LoadKeychainInternal: Api username does not match"); - throw new TokenUsernameMismatchException(keychainConnection.Username, login); + throw new TokenUsernameMismatchException(Connection.Username, login); } return new GitHubUser @@ -286,13 +469,20 @@ private GitHubUser GetValidatedGitHubUser(Connection keychainConnection, IKeycha } } - class GitHubUser + public class GitHubHostMeta + { + public bool VerifiablePasswordAuthentication { get; set; } + public string GithubServicesSha { get; set; } + public string InstalledVersion { get; set; } + } + + public class GitHubUser { public string Name { get; set; } public string Login { get; set; } } - class GitHubRepository + public class GitHubRepository { public string Name { get; set; } public string CloneUrl { get; set; } @@ -315,7 +505,7 @@ protected ApiClientException(SerializationInfo info, StreamingContext context) : } 
[Serializable] - class TokenUsernameMismatchException : ApiClientException + public class TokenUsernameMismatchException : ApiClientException { public string CachedUsername { get; } public string CurrentUsername { get; } @@ -330,7 +520,7 @@ protected TokenUsernameMismatchException(SerializationInfo info, StreamingContex } [Serializable] - class KeychainEmptyException : ApiClientException + public class KeychainEmptyException : ApiClientException { public KeychainEmptyException() { diff --git a/src/GitHub.Api/Application/ApplicationInfo.cs b/src/GitHub.Api/Application/ApplicationInfo.cs index f70bf4241..f15f7d482 100644 --- a/src/GitHub.Api/Application/ApplicationInfo.cs +++ b/src/GitHub.Api/Application/ApplicationInfo.cs @@ -6,15 +6,32 @@ static partial class ApplicationInfo #if DEBUG public const string ApplicationName = "GitHub for Unity Debug"; public const string ApplicationProvider = "GitHub"; + public const string ApplicationSafeName = "GitHubUnity-dev"; #else public const string ApplicationName = "GitHubUnity"; public const string ApplicationProvider = "GitHub"; -#endif public const string ApplicationSafeName = "GitHubUnity"; +#endif public const string ApplicationDescription = "GitHub for Unity"; +#if DEBUG +/* + For external contributors, we have bundled a developer OAuth application + called `GitHub for Unity (dev)` so that you can complete the sign in flow + locally without needing to configure your own application. + This is for testing only and it is (obviously) public, proceed with caution. + + For a release build, you should create a new oauth application on github.com, + copy the `common/ApplicationInfo_Local.cs-example` + template to `common/ApplicationInfo_Local.cs` and fill out the `myClientId` and + `myClientSecret` fields for your oauth app. 
+ */ + internal static string ClientId { get; private set; } = "924a97f36926f535e72c"; + internal static string ClientSecret { get; private set; } = "b4fa550b7f8e38034c6b1339084fa125eebb6155"; +#else internal static string ClientId { get; private set; } = ""; internal static string ClientSecret { get; private set; } = ""; +#endif public static string Version { get { return System.AssemblyVersionInformation.Version; } } diff --git a/src/GitHub.Api/Application/ApplicationManagerBase.cs b/src/GitHub.Api/Application/ApplicationManagerBase.cs index e009573d2..5d258087e 100644 --- a/src/GitHub.Api/Application/ApplicationManagerBase.cs +++ b/src/GitHub.Api/Application/ApplicationManagerBase.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { - class ApplicationManagerBase : IApplicationManager + public class ApplicationManagerBase : IApplicationManager { protected static ILogging Logger { get; } = LogHelper.GetLogger(); @@ -48,14 +48,13 @@ protected void Initialize() ApplicationConfiguration.GitTimeout = UserSettings.Get(Constants.GitTimeoutKey, ApplicationConfiguration.GitTimeout); Platform.Initialize(ProcessManager, TaskManager); progress.OnProgress += progressReporter.UpdateProgress; - UsageTracker = new UsageTracker(TaskManager, GitClient, ProcessManager, UserSettings, Environment, InstanceId.ToString()); + UsageTracker = new UsageTracker(TaskManager, GitClient, ProcessManager, UserSettings, Environment, Platform.Keychain, InstanceId.ToString()); #if ENABLE_METRICS var metricsService = new MetricsService(ProcessManager, TaskManager, - Environment.FileSystem, - Environment.NodeJsExecutablePath, - Environment.OctorunScriptPath); + Platform.Keychain, + Environment); UsageTracker.MetricsService = metricsService; #endif } @@ -138,7 +137,7 @@ public void Run() RestartRepository(); } - progress.UpdateProgress(100, 100, "Initialization failed"); + progress.UpdateProgress(100, 100, "Initialized"); } catch (Exception ex) { @@ -191,7 +190,7 @@ public void 
SetupGit(GitInstaller.GitInstallationState state) { if (Environment.RepositoryPath.IsInitialized) { - ConfigureMergeSettings(); + UpdateMergeSettings(); GitClient.LfsInstall() .Catch(e => @@ -281,23 +280,56 @@ public void InitializeRepository() thread.Start(); } - private void ConfigureMergeSettings() + private void ConfigureMergeSettings(string keyName = null) { var unityYamlMergeExec = Environment.UnityApplicationContents.Combine("Tools", "UnityYAMLMerge" + Environment.ExecutableExtension); - var yamlMergeCommand = Environment.IsWindows - ? $@"'{unityYamlMergeExec}' merge -p ""$BASE"" ""$REMOTE"" ""$LOCAL"" ""$MERGED""" - : $@"'{unityYamlMergeExec}' merge -p '$BASE' '$REMOTE' '$LOCAL' '$MERGED'"; - GitClient.SetConfig("merge.unityyamlmerge.cmd", yamlMergeCommand, GitConfigSource.Local).Catch(e => { - Logger.Error(e, "Error setting merge.unityyamlmerge.cmd"); + var yamlMergeCommand = $"'{unityYamlMergeExec}' merge -h -p --force %O %B %A %A"; + + keyName = keyName ?? "unityyamlmerge"; + + GitClient.SetConfig($"merge.{keyName}.name", "Unity SmartMerge (UnityYamlMerge)", GitConfigSource.Local).Catch(e => { + Logger.Error(e, "Error setting merge." + keyName + ".name"); + return true; + }).RunSynchronously(); + + GitClient.SetConfig($"merge.{keyName}.driver", yamlMergeCommand, GitConfigSource.Local).Catch(e => { + Logger.Error(e, "Error setting merge." + keyName + ".driver"); + return true; + }).RunSynchronously(); + + GitClient.SetConfig($"merge.{keyName}.recursive", "binary", GitConfigSource.Local).Catch(e => { + Logger.Error(e, "Error setting merge." 
+ keyName + ".recursive"); + return true; + }).RunSynchronously(); + } + + private void UpdateMergeSettings() + { + var gitAttributesPath = Environment.RepositoryPath.Combine(".gitattributes"); + if (gitAttributesPath.FileExists()) + { + var readAllText = gitAttributesPath.ReadAllText(); + var containsLegacyUnityYamlMergeError = readAllText.Contains("unityamlmerge"); + + if (containsLegacyUnityYamlMergeError) + { + ConfigureMergeSettings("unityamlmerge"); + } + } + + GitClient.UnSetConfig("merge.unityyamlmerge.cmd", GitConfigSource.Local).Catch(e => { + Logger.Error(e, "Error removing merge.unityyamlmerge.cmd"); return true; }).RunSynchronously(); - GitClient.SetConfig("merge.unityyamlmerge.trustExitCode", "false", GitConfigSource.Local).Catch(e => { - Logger.Error(e, "Error setting merge.unityyamlmerge.trustExitCode"); + GitClient.UnSetConfig("merge.unityyamlmerge.trustExitCode", GitConfigSource.Local).Catch(e => { + Logger.Error(e, "Error removing merge.unityyamlmerge.trustExitCode"); return true; }).RunSynchronously(); + + ConfigureMergeSettings(); } public void RestartRepository() @@ -319,6 +351,8 @@ protected virtual void InitializeUI() {} protected virtual void InitializationComplete() {} private bool disposed = false; + private IOAuthCallbackManager oAuthCallbackManager; + protected virtual void Dispose(bool disposing) { if (disposing) @@ -357,6 +391,20 @@ public void Dispose() public ISettings SystemSettings { get { return Environment.SystemSettings; } } public ISettings UserSettings { get { return Environment.UserSettings; } } public IUsageTracker UsageTracker { get; protected set; } + + public IOAuthCallbackManager OAuthCallbackManager + { + get + { + if (oAuthCallbackManager == null) + { + oAuthCallbackManager = new OAuthCallbackManager(); + } + + return oAuthCallbackManager; + } + } + public bool IsBusy { get { return isBusy; } } protected TaskScheduler UIScheduler { get; private set; } protected SynchronizationContext SynchronizationContext { get; 
private set; } diff --git a/src/GitHub.Api/Application/IApiClient.cs b/src/GitHub.Api/Application/IApiClient.cs index 650595ce2..09c79611f 100644 --- a/src/GitHub.Api/Application/IApiClient.cs +++ b/src/GitHub.Api/Application/IApiClient.cs @@ -2,16 +2,18 @@ namespace GitHub.Unity { - interface IApiClient + public interface IApiClient { HostAddress HostAddress { get; } - UriString OriginalUrl { get; } void CreateRepository(string name, string description, bool isPrivate, Action callback, string organization = null); void GetOrganizations(Action onSuccess, Action onError = null); void Login(string username, string password, Action need2faCode, Action result); void ContinueLogin(LoginResult loginResult, string code); + void LoginWithToken(string token, Action result); ITask Logout(UriString host); void GetCurrentUser(Action onSuccess, Action onError = null); + void GetEnterpriseServerMeta(Action onSuccess, Action onError = null); + void CreateOAuthToken(string code, Action result); } } diff --git a/src/GitHub.Api/Application/IApplicationManager.cs b/src/GitHub.Api/Application/IApplicationManager.cs index 9b8b5f638..ab82a27d3 100644 --- a/src/GitHub.Api/Application/IApplicationManager.cs +++ b/src/GitHub.Api/Application/IApplicationManager.cs @@ -16,6 +16,7 @@ public interface IApplicationManager : IDisposable ITaskManager TaskManager { get; } IGitClient GitClient { get; } IUsageTracker UsageTracker { get; } + IOAuthCallbackManager OAuthCallbackManager { get; } bool IsBusy { get; } void Run(); void InitializeRepository(); @@ -23,4 +24,4 @@ public interface IApplicationManager : IDisposable void SetupGit(GitInstaller.GitInstallationState state); void RestartRepository(); } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Application/Organization.cs b/src/GitHub.Api/Application/Organization.cs index e78849dd6..8deea7d99 100644 --- a/src/GitHub.Api/Application/Organization.cs +++ b/src/GitHub.Api/Application/Organization.cs @@ -1,8 +1,8 @@ namespace 
GitHub.Unity { - class Organization + public class Organization { public string Name { get; set; } public string Login { get; set; } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Authentication/Credential.cs b/src/GitHub.Api/Authentication/Credential.cs index 2e31f9838..86e76c458 100644 --- a/src/GitHub.Api/Authentication/Credential.cs +++ b/src/GitHub.Api/Authentication/Credential.cs @@ -16,7 +16,7 @@ public Credential(UriString host, string username, string token) this.Token = token; } - public void UpdateToken(string token, string username) + public void Update(string token, string username) { this.Token = token; this.Username = username; diff --git a/src/GitHub.Api/Authentication/ICredentialManager.cs b/src/GitHub.Api/Authentication/ICredentialManager.cs index 68bf53eb6..94aef5a97 100644 --- a/src/GitHub.Api/Authentication/ICredentialManager.cs +++ b/src/GitHub.Api/Authentication/ICredentialManager.cs @@ -8,7 +8,7 @@ public interface ICredential : IDisposable UriString Host { get; } string Username { get; } string Token { get; } - void UpdateToken(string token, string username); + void Update(string token, string username); } public interface ICredentialManager @@ -17,6 +17,5 @@ public interface ICredentialManager void Save(ICredential cred); void Delete(UriString host); bool HasCredentials(); - ICredential CachedCredentials { get; } } } diff --git a/src/GitHub.Api/Authentication/IKeychain.cs b/src/GitHub.Api/Authentication/IKeychain.cs index 92d5dc524..4a14e1e2f 100644 --- a/src/GitHub.Api/Authentication/IKeychain.cs +++ b/src/GitHub.Api/Authentication/IKeychain.cs @@ -6,15 +6,13 @@ namespace GitHub.Unity public interface IKeychain { IKeychainAdapter Connect(UriString host); - IKeychainAdapter Load(UriString host); + IKeychainAdapter LoadFromSystem(UriString host); void Clear(UriString host, bool deleteFromCredentialManager); - void Save(UriString host); - void SetCredentials(ICredential credential); + void SaveToSystem(UriString host); void 
Initialize(); Connection[] Connections { get; } IList Hosts { get; } bool HasKeys { get; } - void SetToken(UriString host, string token, string username); event Action ConnectionsChanged; } diff --git a/src/GitHub.Api/Authentication/ILoginManager.cs b/src/GitHub.Api/Authentication/ILoginManager.cs index 66d982ae0..c78112c0e 100644 --- a/src/GitHub.Api/Authentication/ILoginManager.cs +++ b/src/GitHub.Api/Authentication/ILoginManager.cs @@ -8,9 +8,9 @@ namespace GitHub.Unity interface ILoginManager { /// - /// Attempts to log into a GitHub server. + /// Attempts to log into a GitHub server with a username and password. /// - /// + /// The host. /// The username. /// The password. /// The logged in user. @@ -18,6 +18,7 @@ interface ILoginManager /// The login authorization failed. /// LoginResultData Login(UriString host, string username, string password); + LoginResultData ContinueLogin(LoginResultData loginResultData, string twofacode); /// @@ -26,5 +27,13 @@ interface ILoginManager /// The address of the server. /// ITask Logout(UriString hostAddress); + + /// + /// Attempts to log into a GitHub server with a token. + /// + /// The host. + /// The token. 
+ /// + bool LoginWithToken(UriString host, string token); } } diff --git a/src/GitHub.Api/Authentication/Keychain.cs b/src/GitHub.Api/Authentication/Keychain.cs index 992cc26b3..c71d45cbe 100644 --- a/src/GitHub.Api/Authentication/Keychain.cs +++ b/src/GitHub.Api/Authentication/Keychain.cs @@ -67,7 +67,7 @@ public bool Equals(Connection other) } } - class Keychain : IKeychain + public class Keychain : IKeychain { const string ConnectionFile = "connections.json"; @@ -95,19 +95,16 @@ public Keychain(IEnvironment environment, ICredentialManager credentialManager) public IKeychainAdapter Connect(UriString host) { Guard.ArgumentNotNull(host, nameof(host)); - return FindOrCreateAdapter(host); } - public IKeychainAdapter Load(UriString host) + public IKeychainAdapter LoadFromSystem(UriString host) { Guard.ArgumentNotNull(host, nameof(host)); - var keychainAdapter = FindOrCreateAdapter(host); - var connection = GetConnection(host); - - var keychainItem = credentialManager.Load(host); - if (keychainItem == null) + var keychainAdapter = Connect(host) as KeychainAdapter; + var credential = credentialManager.Load(host); + if (credential == null) { logger.Warning("Cannot load host from Credential Manager; removing from cache"); Clear(host, false); @@ -115,12 +112,18 @@ public IKeychainAdapter Load(UriString host) } else { - if (keychainItem.Username != connection.Username) + keychainAdapter.Set(credential); + var connection = GetConnection(host); + if (connection.Username == null) { - logger.Warning("Keychain Username:\"{0}\" does not match cached Username:\"{1}\"; Hopefully it works", keychainItem.Username, connection.Username); + connection.Username = credential.Username; + SaveConnectionsToDisk(); } - keychainAdapter.Set(keychainItem); + if (credential.Username != connection.Username) + { + logger.Warning("Keychain Username:\"{0}\" does not match cached Username:\"{1}\"; Hopefully it works", credential.Username, connection.Username); + } } return keychainAdapter; } @@ 
-151,7 +154,7 @@ public void Clear(UriString host, bool deleteFromCredentialManager) RemoveCredential(host, deleteFromCredentialManager); } - public void Save(UriString host) + public void SaveToSystem(UriString host) { Guard.ArgumentNotNull(host, nameof(host)); @@ -159,24 +162,6 @@ public void Save(UriString host) AddConnection(new Connection(host, keychainAdapter.Credential.Username)); } - public void SetCredentials(ICredential credential) - { - Guard.ArgumentNotNull(credential, nameof(credential)); - - var keychainAdapter = GetKeychainAdapter(credential.Host); - keychainAdapter.Set(credential); - } - - public void SetToken(UriString host, string token, string username) - { - Guard.ArgumentNotNull(host, nameof(host)); - Guard.ArgumentNotNull(token, nameof(token)); - Guard.ArgumentNotNull(username, nameof(username)); - - var keychainAdapter = GetKeychainAdapter(host); - keychainAdapter.UpdateToken(token, username); - } - private void LoadConnectionsFromDisk() { if (cachePath.FileExists()) @@ -262,11 +247,11 @@ private void RemoveCredential(UriString host, bool deleteFromCredentialManager) private Connection GetConnection(UriString host) { if (!connections.ContainsKey(host)) - throw new ArgumentException($"{host} is not found", nameof(host)); + return AddConnection(new Connection(host, null)); return connections[host]; } - private void AddConnection(Connection connection) + private Connection AddConnection(Connection connection) { // create new connection in the connection cache for this host if (connections.ContainsKey(connection.Host)) @@ -274,6 +259,7 @@ private void AddConnection(Connection connection) else connections.Add(connection.Host, connection); SaveConnectionsToDisk(); + return connection; } private void RemoveConnection(UriString host) diff --git a/src/GitHub.Api/Authentication/KeychainAdapter.cs b/src/GitHub.Api/Authentication/KeychainAdapter.cs index abbe9895e..7a0c5d3aa 100644 --- a/src/GitHub.Api/Authentication/KeychainAdapter.cs +++ 
b/src/GitHub.Api/Authentication/KeychainAdapter.cs @@ -1,6 +1,6 @@ namespace GitHub.Unity { - class KeychainAdapter : IKeychainAdapter + public class KeychainAdapter : IKeychainAdapter { public ICredential Credential { get; private set; } @@ -9,9 +9,9 @@ public void Set(ICredential credential) Credential = credential; } - public void UpdateToken(string token, string username) + public void Update(string token, string username) { - Credential.UpdateToken(token, username); + Credential.Update(token, username); } public void Clear() @@ -23,5 +23,8 @@ public void Clear() public interface IKeychainAdapter { ICredential Credential { get; } + void Set(ICredential credential); + void Update(string token, string username); + void Clear(); } } diff --git a/src/GitHub.Api/Authentication/LoginManager.cs b/src/GitHub.Api/Authentication/LoginManager.cs index 44a337a54..6f892b659 100644 --- a/src/GitHub.Api/Authentication/LoginManager.cs +++ b/src/GitHub.Api/Authentication/LoginManager.cs @@ -1,4 +1,5 @@ using System; +using System.Text; using GitHub.Logging; namespace GitHub.Unity @@ -15,15 +16,14 @@ public enum LoginResultCodes /// /// Provides services for logging into a GitHub server. /// - class LoginManager : ILoginManager + public class LoginManager : ILoginManager { private readonly ILogging logger = LogHelper.GetLogger(); private readonly IKeychain keychain; private readonly IProcessManager processManager; private readonly ITaskManager taskManager; - private readonly NPath? nodeJsExecutablePath; - private readonly NPath? octorunScript; + private readonly IEnvironment environment; /// /// Initializes a new instance of the class. @@ -35,15 +35,39 @@ class LoginManager : ILoginManager /// public LoginManager( IKeychain keychain, IProcessManager processManager, ITaskManager taskManager, - NPath? nodeJsExecutablePath = null, NPath? 
octorunScript = null) + IEnvironment environment) { Guard.ArgumentNotNull(keychain, nameof(keychain)); this.keychain = keychain; this.processManager = processManager; this.taskManager = taskManager; - this.nodeJsExecutablePath = nodeJsExecutablePath; - this.octorunScript = octorunScript; + this.environment = environment; + } + + public bool LoginWithToken(UriString host, string token) + { + Guard.ArgumentNotNull(host, nameof(host)); + Guard.ArgumentNotNullOrWhiteSpace(token, nameof(token)); + + var keychainAdapter = keychain.Connect(host); + keychainAdapter.Set(new Credential(host, "[token]", token)); + + try + { + var username = RetrieveUsername(token, host); + keychainAdapter.Update(token, username); + keychain.SaveToSystem(host); + + return true; + } + catch (Exception e) + { + logger.Warning(e, "Login Exception"); + + keychain.Clear(host, false); + return false; + } } /// @@ -58,8 +82,8 @@ public LoginResultData Login( // Start by saving the username and password, these will be used by the `IGitHubClient` // until an authorization token has been created and acquired: - keychain.Connect(host); - keychain.SetCredentials(new Credential(host, username, password)); + var keychainAdapter = keychain.Connect(host); + keychainAdapter.Set(new Credential(host, username, password)); try { @@ -71,16 +95,13 @@ public LoginResultData Login( throw new InvalidOperationException("Returned token is null or empty"); } - if (loginResultData.Code == LoginResultCodes.Success) - { - username = RetrieveUsername(loginResultData, username); - } - - keychain.SetToken(host, loginResultData.Token, username); + keychainAdapter.Update(loginResultData.Token, username); if (loginResultData.Code == LoginResultCodes.Success) { - keychain.Save(host); + username = RetrieveUsername(loginResultData.Token, host); + keychainAdapter.Update(loginResultData.Token, username); + keychain.SaveToSystem(host); } return loginResultData; @@ -101,6 +122,9 @@ public LoginResultData ContinueLogin(LoginResultData 
loginResultData, string two { var host = loginResultData.Host; var keychainAdapter = keychain.Connect(host); + if (keychainAdapter.Credential == null) { + return new LoginResultData(LoginResultCodes.Failed, Localization.LoginFailed, host); + } var username = keychainAdapter.Credential.Username; var password = keychainAdapter.Credential.Token; try @@ -114,9 +138,10 @@ public LoginResultData ContinueLogin(LoginResultData loginResultData, string two throw new InvalidOperationException("Returned token is null or empty"); } - username = RetrieveUsername(loginResultData, username); - keychain.SetToken(host, loginResultData.Token, username); - keychain.Save(host); + keychainAdapter.Update(loginResultData.Token, username); + username = RetrieveUsername(loginResultData.Token, host); + keychainAdapter.Update(loginResultData.Token, username); + keychain.SaveToSystem(host); return loginResultData; } @@ -146,22 +171,23 @@ private LoginResultData TryLogin( string code = null ) { - if (!nodeJsExecutablePath.HasValue) + var hasTwoFactorCode = code != null; + + var command = new StringBuilder("login"); + + if (hasTwoFactorCode) { - throw new InvalidOperationException("nodeJsExecutablePath must be set"); + command.Append(" --twoFactor"); } - if (!octorunScript.HasValue) + if (!HostAddress.IsGitHubDotCom(host)) { - throw new InvalidOperationException("octorunScript must be set"); + command.Append(" -h "); + command.Append(host.Host); } - var hasTwoFactorCode = code != null; - - var arguments = hasTwoFactorCode ? 
"login --twoFactor" : "login"; - var loginTask = new OctorunTask(taskManager.Token, nodeJsExecutablePath.Value, octorunScript.Value, - arguments, ApplicationInfo.ClientId, ApplicationInfo.ClientSecret); - loginTask.Configure(processManager, workingDirectory: octorunScript.Value.Parent.Parent, withInput: true); + var loginTask = new OctorunTask(taskManager.Token, environment, command.ToString()); + loginTask.Configure(processManager, withInput: true); loginTask.OnStartProcess += proc => { proc.StandardInput.WriteLine(username); @@ -191,15 +217,11 @@ private LoginResultData TryLogin( return new LoginResultData(LoginResultCodes.Failed, ret.GetApiErrorMessage() ?? "Failed.", host); } - private string RetrieveUsername(LoginResultData loginResultData, string username) + private string RetrieveUsername(string token, UriString host) { - if (!username.Contains("@")) - { - return username; - } - - var octorunTask = new OctorunTask(taskManager.Token, nodeJsExecutablePath.Value, octorunScript.Value, "validate", - user: username, userToken: loginResultData.Token).Configure(processManager); + var command = HostAddress.IsGitHubDotCom(host) ? 
"validate" : "validate -h " + host.Host; + var octorunTask = new OctorunTask(taskManager.Token, environment, command, token) + .Configure(processManager); var validateResult = octorunTask.RunSynchronously(); if (!validateResult.IsSuccess) @@ -211,7 +233,7 @@ private string RetrieveUsername(LoginResultData loginResultData, string username } } - class LoginResultData + public class LoginResultData { public LoginResultCodes Code; public string Message; diff --git a/src/GitHub.Api/Authentication/OAuthCallbackManager.cs b/src/GitHub.Api/Authentication/OAuthCallbackManager.cs new file mode 100644 index 000000000..b5b75b094 --- /dev/null +++ b/src/GitHub.Api/Authentication/OAuthCallbackManager.cs @@ -0,0 +1,106 @@ +using System; +using System.IO; +using System.Net; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Web; +using GitHub.Logging; + +namespace GitHub.Unity +{ + public interface IOAuthCallbackManager + { + event Action OnCallback; + bool IsRunning { get; } + void Start(); + void Stop(); + } + + public class OAuthCallbackManager : IOAuthCallbackManager + { + const int CallbackPort = 42424; + public static readonly Uri CallbackUrl = new Uri($"http://localhost:{CallbackPort}/callback"); + + private static readonly ILogging logger = LogHelper.GetLogger(); + private static readonly object _lock = new object(); + + + private readonly CancellationTokenSource cancelSource; + + private HttpListener httpListener; + public bool IsRunning { get; private set; } + + public event Action OnCallback; + + public OAuthCallbackManager() + { + cancelSource = new CancellationTokenSource(); + } + + public void Start() + { + if (!IsRunning) + { + lock(_lock) + { + if (!IsRunning) + { + logger.Trace("Starting"); + + httpListener = new HttpListener(); + httpListener.Prefixes.Add(CallbackUrl.AbsoluteUri + "/"); + httpListener.Start(); + Task.Factory.StartNew(Listen, cancelSource.Token); + IsRunning = true; + } + } + } + } + + public void Stop() + { 
+ logger.Trace("Stopping"); + cancelSource.Cancel(); + } + + private void Listen() + { + try + { + using (httpListener) + { + using (cancelSource.Token.Register(httpListener.Stop)) + { + while (true) + { + var context = httpListener.GetContext(); + var queryParts = HttpUtility.ParseQueryString(context.Request.Url.Query); + + var state = queryParts["state"]; + var code = queryParts["code"]; + + logger.Trace("OnCallback: {0}", state); + if (OnCallback != null) + { + OnCallback(state, code); + } + + context.Response.StatusCode = 200; + context.Response.Close(); + } + } + } + } + catch (Exception ex) + { + logger.Trace(ex.Message); + } + finally + { + IsRunning = false; + httpListener = null; + } + } + } +} diff --git a/src/GitHub.Api/Cache/CacheContainer.cs b/src/GitHub.Api/Cache/CacheContainer.cs index 40f1802fb..2c07ff867 100644 --- a/src/GitHub.Api/Cache/CacheContainer.cs +++ b/src/GitHub.Api/Cache/CacheContainer.cs @@ -90,6 +90,7 @@ public void Dispose() public IBranchCache BranchCache { get { return (IBranchCache)caches[CacheType.Branches].Value; } } public IGitLogCache GitLogCache { get { return (IGitLogCache)caches[CacheType.GitLog].Value; } } + public IGitFileLogCache GitFileLogCache { get { return (IGitFileLogCache)caches[CacheType.GitFileLog].Value; } } public IGitAheadBehindCache GitTrackingStatusCache { get { return (IGitAheadBehindCache)caches[CacheType.GitAheadBehind].Value; } } public IGitStatusCache GitStatusEntriesCache { get { return (IGitStatusCache)caches[CacheType.GitStatus].Value; } } public IGitLocksCache GitLocksCache { get { return (IGitLocksCache)caches[CacheType.GitLocks].Value; } } diff --git a/src/GitHub.Api/Cache/CacheInterfaces.cs b/src/GitHub.Api/Cache/CacheInterfaces.cs index 35487451e..ae815beac 100644 --- a/src/GitHub.Api/Cache/CacheInterfaces.cs +++ b/src/GitHub.Api/Cache/CacheInterfaces.cs @@ -9,6 +9,7 @@ public enum CacheType RepositoryInfo, Branches, GitLog, + GitFileLog, GitAheadBehind, GitStatus, GitLocks, @@ -22,6 +23,7 @@ 
public interface ICacheContainer : IDisposable IBranchCache BranchCache { get; } IGitLogCache GitLogCache { get; } + IGitFileLogCache GitFileLogCache { get; } IGitAheadBehindCache GitTrackingStatusCache { get; } IGitStatusCache GitStatusEntriesCache { get; } IGitLocksCache GitLocksCache { get; } @@ -40,6 +42,7 @@ public interface IManagedCache bool ValidateData(); void InvalidateData(); + void ResetInvalidation(); DateTimeOffset LastUpdatedAt { get; } CacheType CacheType { get; } @@ -91,7 +94,7 @@ public interface IBranchCache : IManagedCache ILocalConfigBranchDictionary LocalConfigBranches { get; } IRemoteConfigBranchDictionary RemoteConfigBranches { get; } IConfigRemoteDictionary ConfigRemotes { get; } - + void SetRemotes(Dictionary remoteConfigs, Dictionary> configBranches, GitRemote[] gitRemotes, GitBranch[] gitBranches); void SetLocals(Dictionary configBranches, GitBranch[] gitBranches); } @@ -114,6 +117,11 @@ public interface IGitLogCache : IManagedCache List Log { get; set; } } + public interface IGitFileLogCache : IManagedCache + { + GitFileLog FileLog { get; set; } + } + public interface ICanUpdate { void UpdateData(T data); diff --git a/src/GitHub.Api/Events/RepositoryWatcher.cs b/src/GitHub.Api/Events/RepositoryWatcher.cs index 582790303..a9c852fc6 100644 --- a/src/GitHub.Api/Events/RepositoryWatcher.cs +++ b/src/GitHub.Api/Events/RepositoryWatcher.cs @@ -8,7 +8,7 @@ namespace GitHub.Unity { - interface IRepositoryWatcher : IDisposable + public interface IRepositoryWatcher : IDisposable { void Start(); void Stop(); @@ -23,13 +23,14 @@ interface IRepositoryWatcher : IDisposable int CheckAndProcessEvents(); } - class RepositoryWatcher : IRepositoryWatcher + public class RepositoryWatcher : IRepositoryWatcher { private readonly RepositoryPathConfiguration paths; private readonly CancellationToken cancellationToken; private readonly NPath[] ignoredPaths; private readonly ManualResetEventSlim pauseEvent; private NativeInterface nativeInterface; + private 
NativeInterface worktreeNativeInterface; private bool running; private int lastCountOfProcessedEvents = 0; private bool processingEvents; @@ -64,6 +65,11 @@ public void Initialize() try { nativeInterface = new NativeInterface(pathsRepositoryPath); + + if (paths.IsWorktree) + { + worktreeNativeInterface = new NativeInterface(paths.WorktreeDotGitPath); + } } catch (Exception ex) { @@ -80,6 +86,18 @@ public void Start() } Logger.Trace("Watching Path: \"{0}\"", paths.RepositoryPath.ToString()); + + if (paths.IsWorktree) + { + if (worktreeNativeInterface == null) + { + Logger.Warning("Worktree NativeInterface is null"); + throw new InvalidOperationException("Worktree NativeInterface is null"); + } + + Logger.Trace("Watching Additional Path for Worktree: \"{0}\"", paths.WorktreeDotGitPath); + } + running = true; pauseEvent.Reset(); Task.Factory.StartNew(WatcherLoop, cancellationToken, TaskCreationOptions.None, TaskScheduler.Default); @@ -131,6 +149,15 @@ public int CheckAndProcessEvents() processedEventCount = ProcessEvents(fileEvents); } + if (worktreeNativeInterface != null) + { + fileEvents = worktreeNativeInterface.GetEvents(); + if (fileEvents.Length > 0) + { + processedEventCount = processedEventCount + ProcessEvents(fileEvents); + } + } + lastCountOfProcessedEvents = processedEventCount; processingEvents = false; signalProcessingEventsDone.Set(); @@ -158,7 +185,7 @@ private int ProcessEvents(Event[] fileEvents) var fileA = eventDirectory.Combine(fileEvent.FileA); // handling events in .git/* - if (fileA.IsChildOf(paths.DotGitPath)) + if (fileA.IsChildOf(paths.DotGitPath) || (paths.WorktreeDotGitPath.IsInitialized && fileA.IsChildOf(paths.WorktreeDotGitPath))) { if (!events.Contains(EventType.ConfigChanged) && fileA.Equals(paths.DotGitConfig)) { diff --git a/src/GitHub.Api/Git/FailureSeverity.cs b/src/GitHub.Api/Git/FailureSeverity.cs index fdaa58345..3d34f95ef 100644 --- a/src/GitHub.Api/Git/FailureSeverity.cs +++ b/src/GitHub.Api/Git/FailureSeverity.cs @@ -1,8 
+1,8 @@ namespace GitHub.Unity { - enum FailureSeverity + public enum FailureSeverity { Moderate, Critical }; -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/GitBranch.cs b/src/GitHub.Api/Git/GitBranch.cs index 857212810..ecb6fede9 100644 --- a/src/GitHub.Api/Git/GitBranch.cs +++ b/src/GitHub.Api/Git/GitBranch.cs @@ -10,12 +10,12 @@ public struct GitBranch public string name; public string tracking; - public GitBranch(string name, string tracking) + public GitBranch(string name, string tracking = null) { Guard.ArgumentNotNullOrWhiteSpace(name, "name"); this.name = name; - this.tracking = tracking; + this.tracking = tracking ?? string.Empty; } public override int GetHashCode() @@ -64,7 +64,7 @@ public bool Equals(GitBranch other) public override string ToString() { - return $"{Name} Tracking? {Tracking}"; + return $"{Name} Tracking? {Tracking ?? "[NULL]"}"; } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/GitClient.cs b/src/GitHub.Api/Git/GitClient.cs index 8dc75d6b6..6aca3146b 100644 --- a/src/GitHub.Api/Git/GitClient.cs +++ b/src/GitHub.Api/Git/GitClient.cs @@ -2,48 +2,310 @@ using System; using System.Collections.Generic; using System.Threading; +using GitHub.Unity.Git.Tasks; using static GitHub.Unity.GitInstaller; namespace GitHub.Unity { + /// + /// Client that provides access to git functionality + /// public interface IGitClient { + /// + /// Executes `git init` to initialize a git repo. + /// + /// A custom output processor instance + /// String output of git command ITask Init(IOutputProcessor processor = null); + + /// + /// Executes `git lfs install` to install LFS hooks. + /// + /// A custom output processor instance + /// String output of git command ITask LfsInstall(IOutputProcessor processor = null); + + /// + /// Executes `git rev-list` to determine the ahead/behind status between two refs. 
+ /// + /// Ref to compare + /// Ref to compare against + /// A custom output processor instance + /// output ITask AheadBehindStatus(string gitRef, string otherRef, IOutputProcessor processor = null); + + /// + /// Executes `git status` to determine the working directory status. + /// + /// A custom output processor instance + /// output ITask Status(IOutputProcessor processor = null); + + /// + /// Executes `git config get` to get a configuration value. + /// + /// The configuration key to get + /// The config source (unspecified, local,user,global) to use + /// A custom output processor instance + /// String output of git command ITask GetConfig(string key, GitConfigSource configSource, IOutputProcessor processor = null); + + /// + /// Executes `git config set` to set a configuration value. + /// + /// The configuration key to set + /// The value to set + /// The config source (unspecified, local,user,global) to use + /// A custom output processor instance + /// String output of git command ITask SetConfig(string key, string value, GitConfigSource configSource, IOutputProcessor processor = null); + + /// + /// Executes `git config --unset` to remove a configuration value. + /// + /// The configuration key to remove + /// The config source (unspecified, local,user,global) to use + /// A custom output processor instance + /// String output of git command + ITask UnSetConfig(string key, GitConfigSource configSource, IOutputProcessor processor = null); + + /// + /// Executes two `git config get` commands to get the git user and email. + /// + /// output ITask GetConfigUserAndEmail(); + + /// + /// Executes `git lfs locks` to get a list of lfs locks from the git lfs server. + /// + /// + /// A custom output processor instance + /// of output ITask> ListLocks(bool local, BaseOutputListProcessor processor = null); + + /// + /// Executes `git pull` to perform a pull operation. 
+ /// + /// The remote to pull from + /// The branch to pull + /// A custom output processor instance + /// String output of git command ITask Pull(string remote, string branch, IOutputProcessor processor = null); + + /// + /// Executes `git push` to perform a push operation. + /// + /// The remote to push to + /// The branch to push + /// A custom output processor instance + /// String output of git command ITask Push(string remote, string branch, IOutputProcessor processor = null); + + /// + /// Executes `git revert` to perform a revert operation. + /// + /// The changeset to revert + /// A custom output processor instance + /// String output of git command ITask Revert(string changeset, IOutputProcessor processor = null); + + /// + /// Executes `git fetch` to perform a fetch operation. + /// + /// The remote to fetch from + /// A custom output processor instance + /// String output of git command ITask Fetch(string remote, IOutputProcessor processor = null); + + /// + /// Executes `git checkout` to switch branches. + /// + /// The branch to checkout + /// A custom output processor instance + /// String output of git command ITask SwitchBranch(string branch, IOutputProcessor processor = null); + + /// + /// Executes `git branch -d` to delete a branch. + /// + /// The branch to delete + /// The flag to indicate the branch should be deleted even if not merged + /// A custom output processor instance + /// String output of git command ITask DeleteBranch(string branch, bool deleteUnmerged = false, IOutputProcessor processor = null); + + /// + /// Executes `git branch` to create a branch. + /// + /// The name of branch to create + /// The name of branch to create from + /// A custom output processor instance + /// String output of git command ITask CreateBranch(string branch, string baseBranch, IOutputProcessor processor = null); + + /// + /// Executes `git remote add` to add a git remote. 
+ /// + /// The remote to add + /// The url of the remote + /// A custom output processor instance + /// String output of git command ITask RemoteAdd(string remote, string url, IOutputProcessor processor = null); + + /// + /// Executes `git remote rm` to remove a git remote. + /// + /// The remote to remove + /// A custom output processor instance + /// String output of git command ITask RemoteRemove(string remote, IOutputProcessor processor = null); + + /// + /// Executes `git remote set-url` to change the url of a git remote. + /// + /// The remote to change + /// The url to change to + /// A custom output processor instance + /// String output of git command ITask RemoteChange(string remote, string url, IOutputProcessor processor = null); + + /// + /// Executes `git commit` to perform a commit operation. + /// + /// The commit message summary + /// The commit message body + /// A custom output processor instance + /// String output of git command ITask Commit(string message, string body, IOutputProcessor processor = null); + + /// + /// Executes at least one `git add` command to add the list of files to the git index. + /// + /// The file to add + /// A custom output processor instance + /// String output of git command ITask Add(IList files, IOutputProcessor processor = null); + + /// + /// Executes `git add -A` to add all files to the git index. + /// + /// A custom output processor instance + /// String output of git command ITask AddAll(IOutputProcessor processor = null); + + /// + /// Executes at least one `git checkout` command to discard changes to the list of files. + /// + /// The files to discard + /// A custom output processor instance + /// String output of git command ITask Discard(IList files, IOutputProcessor processor = null); + + /// + /// Executes `git checkout -- .` to discard all changes in the working directory. 
+ /// + /// A custom output processor instance + /// String output of git command ITask DiscardAll(IOutputProcessor processor = null); + + /// + /// Executes at least one `git checkout` command to checkout files at the given changeset + /// + /// The md5 of the changeset + /// The files to check out + /// A custom output processor instance + /// String output of git command + ITask CheckoutVersion(string changeset, IList files, IOutputProcessor processor = null); + + /// + /// Executes at least one `git reset HEAD` command to remove files from the git index. + /// + /// The files to remove + /// A custom output processor instance + /// String output of git command ITask Remove(IList files, IOutputProcessor processor = null); + + /// + /// Executes at least one `git add` command to add the list of files to the git index. Followed by a `git commit` command to commit the changes. + /// + /// The files to add and commit + /// The commit message summary + /// The commit message body + /// A custom output processor instance + /// String output of git command ITask AddAndCommit(IList files, string message, string body, IOutputProcessor processor = null); + + /// + /// Executes `git lfs lock` to lock a file. + /// + /// The file to lock + /// A custom output processor instance + /// String output of git command ITask Lock(NPath file, IOutputProcessor processor = null); + + /// + /// Executes `git lfs unlock` to unlock a file. + /// + /// The file to unlock + /// If force should be used + /// A custom output processor instance + /// String output of git command ITask Unlock(NPath file, bool force, IOutputProcessor processor = null); + + /// + /// Executes `git log` to get the history of the current branch. + /// + /// A custom output processor instance + /// of output ITask> Log(BaseOutputListProcessor processor = null); + + /// + /// Executes `git log -- ` to get the history of a specific file. 
+ /// + /// + /// A custom output processor instance + /// of output + ITask> LogFile(string file, BaseOutputListProcessor processor = null); + + /// + /// Executes `git --version` to get the git version. + /// + /// A custom output processor instance + /// output ITask Version(IOutputProcessor processor = null); + + /// + /// Executes `git lfs version` to get the git lfs version. + /// + /// A custom output processor instance + /// output ITask LfsVersion(IOutputProcessor processor = null); + + /// + /// Executes `git count-objects` to get the size of the git repo in kilobytes. + /// + /// A custom output processor instance + /// output ITask CountObjects(IOutputProcessor processor = null); + + /// + /// Executes two `git set config` commands to set the git name and email. + /// + /// The username to set + /// The email to set + /// output ITask SetConfigNameAndEmail(string username, string email); + + /// + /// Executes `git rev-parse --short HEAD` to get the current commit sha of the current branch. 
+ /// + /// A custom output processor instance + /// String output of git command ITask GetHead(IOutputProcessor processor = null); } - class GitClient : IGitClient + public class GitClient : IGitClient { private const string UserNameConfigKey = "user.name"; private const string UserEmailConfigKey = "user.email"; @@ -58,66 +320,104 @@ public GitClient(IEnvironment environment, IProcessManager processManager, Cance this.cancellationToken = cancellationToken; } + /// public ITask Init(IOutputProcessor processor = null) { return new GitInitTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask LfsInstall(IOutputProcessor processor = null) { return new GitLfsInstallTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask Status(IOutputProcessor processor = null) { return new GitStatusTask(new GitObjectFactory(environment), cancellationToken, processor) .Configure(processManager); } + /// public ITask AheadBehindStatus(string gitRef, string otherRef, IOutputProcessor processor = null) { return new GitAheadBehindStatusTask(gitRef, otherRef, cancellationToken, processor) .Configure(processManager); } + /// public ITask> Log(BaseOutputListProcessor processor = null) { return new GitLogTask(new GitObjectFactory(environment), cancellationToken, processor) - .Configure(processManager); + .Configure(processManager) + .Catch(exception => exception is ProcessException && + exception.Message.StartsWith("fatal: your current branch") && + exception.Message.EndsWith("does not have any commits yet")) + .Then((success, list) => success ? 
list : new List()); + } + + /// + public ITask> LogFile(string file, BaseOutputListProcessor processor = null) + { + if (file == NPath.Default) + { + return new FuncTask>(cancellationToken, () => new List(0)); + } + + return new GitLogTask(file, new GitObjectFactory(environment), cancellationToken, processor) + .Configure(processManager) + .Catch(exception => exception is ProcessException && + exception.Message.StartsWith("fatal: your current branch") && + exception.Message.EndsWith("does not have any commits yet")) + .Then((success, list) => success ? list : new List()); } + /// public ITask Version(IOutputProcessor processor = null) { return new GitVersionTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask LfsVersion(IOutputProcessor processor = null) { return new GitLfsVersionTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask CountObjects(IOutputProcessor processor = null) { return new GitCountObjectsTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask GetConfig(string key, GitConfigSource configSource, IOutputProcessor processor = null) { return new GitConfigGetTask(key, configSource, cancellationToken, processor) .Configure(processManager); } + /// public ITask SetConfig(string key, string value, GitConfigSource configSource, IOutputProcessor processor = null) { return new GitConfigSetTask(key, value, configSource, cancellationToken, processor) .Configure(processManager); } + /// + public ITask UnSetConfig(string key, GitConfigSource configSource, IOutputProcessor processor = null) + { + return new GitConfigUnSetTask(key, configSource, cancellationToken, processor) + .Configure(processManager); + } + + /// public ITask GetConfigUserAndEmail() { string username = null; @@ -141,6 +441,7 @@ public ITask GetConfigUserAndEmail() }); } + /// public ITask SetConfigNameAndEmail(string username, string email) { return SetConfig(UserNameConfigKey, username, 
GitConfigSource.User) @@ -148,18 +449,21 @@ public ITask SetConfigNameAndEmail(string username, string email) .Then(b => new GitUser(username, email)); } + /// public ITask> ListLocks(bool local, BaseOutputListProcessor processor = null) { return new GitListLocksTask(local, cancellationToken, processor) .Configure(processManager, environment.GitLfsExecutablePath); } + /// public ITask Pull(string remote, string branch, IOutputProcessor processor = null) { return new GitPullTask(remote, branch, cancellationToken, processor) .Configure(processManager); } + /// public ITask Push(string remote, string branch, IOutputProcessor processor = null) { @@ -167,12 +471,14 @@ public ITask Push(string remote, string branch, .Configure(processManager); } + /// public ITask Revert(string changeset, IOutputProcessor processor = null) { return new GitRevertTask(changeset, cancellationToken, processor) .Configure(processManager); } + /// public ITask Fetch(string remote, IOutputProcessor processor = null) { @@ -180,12 +486,14 @@ public ITask Fetch(string remote, .Configure(processManager); } + /// public ITask SwitchBranch(string branch, IOutputProcessor processor = null) { return new GitSwitchBranchesTask(branch, cancellationToken, processor) .Configure(processManager); } + /// public ITask DeleteBranch(string branch, bool deleteUnmerged = false, IOutputProcessor processor = null) { @@ -193,6 +501,7 @@ public ITask DeleteBranch(string branch, bool deleteUnmerged = false, .Configure(processManager); } + /// public ITask CreateBranch(string branch, string baseBranch, IOutputProcessor processor = null) { @@ -200,6 +509,7 @@ public ITask CreateBranch(string branch, string baseBranch, .Configure(processManager); } + /// public ITask RemoteAdd(string remote, string url, IOutputProcessor processor = null) { @@ -207,6 +517,7 @@ public ITask RemoteAdd(string remote, string url, .Configure(processManager); } + /// public ITask RemoteRemove(string remote, IOutputProcessor processor = null) { 
@@ -214,6 +525,7 @@ public ITask RemoteRemove(string remote, .Configure(processManager); } + /// public ITask RemoteChange(string remote, string url, IOutputProcessor processor = null) { @@ -221,6 +533,7 @@ public ITask RemoteChange(string remote, string url, .Configure(processManager); } + /// public ITask Commit(string message, string body, IOutputProcessor processor = null) { @@ -228,12 +541,14 @@ public ITask Commit(string message, string body, .Configure(processManager); } + /// public ITask AddAll(IOutputProcessor processor = null) { return new GitAddTask(cancellationToken, processor) .Configure(processManager); } + /// public ITask Add(IList files, IOutputProcessor processor = null) { @@ -255,6 +570,7 @@ public ITask Add(IList files, return last; } + /// public ITask Discard( IList files, IOutputProcessor processor = null) { @@ -276,19 +592,43 @@ public ITask Discard( IList files, return last; } + /// public ITask DiscardAll(IOutputProcessor processor = null) { return new GitCheckoutTask(cancellationToken, processor) .Configure(processManager); } + /// + public ITask CheckoutVersion(string changeset, IList files, IOutputProcessor processor = null) + { + return new GitCheckoutTask(changeset, files, cancellationToken, processor) + .Configure(processManager); + } + + /// public ITask Remove(IList files, IOutputProcessor processor = null) { - return new GitRemoveFromIndexTask(files, cancellationToken, processor) - .Configure(processManager); + GitRemoveFromIndexTask last = null; + foreach (var batch in files.Spool(5000)) + { + var current = new GitRemoveFromIndexTask(batch, cancellationToken, processor).Configure(processManager); + if (last == null) + { + last = current; + } + else + { + last.Then(current); + last = current; + } + } + + return last; } + /// public ITask AddAndCommit(IList files, string message, string body, IOutputProcessor processor = null) { @@ -297,6 +637,7 @@ public ITask AddAndCommit(IList files, string message, string bo 
.Configure(processManager)); } + /// public ITask Lock(NPath file, IOutputProcessor processor = null) { @@ -304,6 +645,7 @@ public ITask Lock(NPath file, .Configure(processManager, environment.GitLfsExecutablePath); } + /// public ITask Unlock(NPath file, bool force, IOutputProcessor processor = null) { @@ -311,10 +653,15 @@ public ITask Unlock(NPath file, bool force, .Configure(processManager, environment.GitLfsExecutablePath); } + /// public ITask GetHead(IOutputProcessor processor = null) { return new FirstNonNullLineProcessTask(cancellationToken, "rev-parse --short HEAD") { Name = "Getting current head..." } - .Configure(processManager); + .Configure(processManager) + .Catch(exception => exception is ProcessException && + exception.Message.StartsWith("fatal: your current branch") && + exception.Message.EndsWith("does not have any commits yet")) + .Then((success, head) => success ? head : null); } protected static ILogging Logger { get; } = LogHelper.GetLogger(); diff --git a/src/GitHub.Api/Git/GitConfig.cs b/src/GitHub.Api/Git/GitConfig.cs index 2faa1c0ba..882538af3 100644 --- a/src/GitHub.Api/Git/GitConfig.cs +++ b/src/GitHub.Api/Git/GitConfig.cs @@ -167,7 +167,7 @@ public interface IGitConfig void SetInt(string section, string key, int value); } - class GitConfig : IGitConfig + public class GitConfig : IGitConfig { private readonly ConfigFileManager manager; private SectionParser sectionParser; @@ -296,7 +296,7 @@ private void SetAndWrite(string section, string key, string value) manager.Save(sb.ToString()); } - class Section : Dictionary> + public class Section : Dictionary> { public Section(string name, string description = null) { @@ -364,7 +364,7 @@ public override string ToString() public string Description { get; private set; } } - class SectionParser + public class SectionParser { private static readonly Regex CommentPattern = new Regex(@"^[;#].*", RegexOptions.Compiled); private static readonly Regex SectionPattern = new Regex(@"^\[(.*)\]$", 
RegexOptions.Compiled); @@ -463,7 +463,7 @@ private void EnsureFileBeginsWithSection() public Dictionary> GroupSections { get; private set; } } - class ConfigFileManager + public class ConfigFileManager { private static readonly string[] emptyContents = new string[0]; diff --git a/src/GitHub.Api/Git/GitCredentialManager.cs b/src/GitHub.Api/Git/GitCredentialManager.cs index 25b946d1b..fe090d0c7 100644 --- a/src/GitHub.Api/Git/GitCredentialManager.cs +++ b/src/GitHub.Api/Git/GitCredentialManager.cs @@ -1,18 +1,20 @@ using GitHub.Logging; using System; using System.Collections.Generic; +using System.Linq; +using GitHub.Unity.Git.Tasks; namespace GitHub.Unity { - class GitCredentialManager : ICredentialManager + public class GitCredentialManager : ICredentialManager { private static ILogging Logger { get; } = LogHelper.GetLogger(); - private ICredential credential; private string credHelper = null; private readonly IProcessManager processManager; private readonly ITaskManager taskManager; + private readonly Dictionary credentials = new Dictionary(); public GitCredentialManager(IProcessManager processManager, ITaskManager taskManager) @@ -23,11 +25,9 @@ public GitCredentialManager(IProcessManager processManager, public bool HasCredentials() { - return credential != null; + return credentials != null && credentials.Any(); } - public ICredential CachedCredentials { get { return credential; } } - public void Delete(UriString host) { if (!LoadCredentialHelper()) @@ -39,12 +39,13 @@ public void Delete(UriString host) String.Format("protocol={0}", host.Protocol), String.Format("host={0}", host.Host) }).RunSynchronously(); - credential = null; + credentials.Remove(host); } public ICredential Load(UriString host) { - if (credential == null) + ICredential credential; + if (!credentials.TryGetValue(host, out credential)) { if (!LoadCredentialHelper()) return null; @@ -60,7 +61,7 @@ public ICredential Load(UriString host) if (String.IsNullOrEmpty(kvpCreds)) { - Logger.Error("No 
credentials are stored"); + // we didn't find credentials, stop here return null; } @@ -87,23 +88,25 @@ public ICredential Load(UriString host) } credential = new Credential(host, user, password); + credentials.Add(host, credential); } + return credential; } public void Save(ICredential cred) { - this.credential = cred; + this.credentials.Add(cred.Host, cred); if (!LoadCredentialHelper()) return; var data = new List { - String.Format("protocol={0}", credential.Host.Protocol), - String.Format("host={0}", credential.Host.Host), - String.Format("username={0}", credential.Username), - String.Format("password={0}", credential.Token) + String.Format("protocol={0}", cred.Host.Protocol), + String.Format("host={0}", cred.Host.Host), + String.Format("username={0}", cred.Username), + String.Format("password={0}", cred.Token) }; var task = RunCredentialHelper("store", data.ToArray()); diff --git a/src/GitHub.Api/Git/GitFileLog.cs b/src/GitHub.Api/Git/GitFileLog.cs new file mode 100644 index 000000000..6795d6c4d --- /dev/null +++ b/src/GitHub.Api/Git/GitFileLog.cs @@ -0,0 +1,32 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Unity +{ + [Serializable] + public struct GitFileLog + { + public static GitFileLog Default = new GitFileLog(null, new List(0)); + + public string path; + public List logEntries; + + public GitFileLog(string path, List logEntries) + { + this.path = path; + this.logEntries = logEntries; + } + + public string Path + { + get { return path; } + set { path = value; } + } + + public List LogEntries + { + get { return logEntries; } + set { logEntries = value; } + } + } +} diff --git a/src/GitHub.Api/Git/GitObjectFactory.cs b/src/GitHub.Api/Git/GitObjectFactory.cs index 16aa5fdde..d0fe3b640 100644 --- a/src/GitHub.Api/Git/GitObjectFactory.cs +++ b/src/GitHub.Api/Git/GitObjectFactory.cs @@ -3,7 +3,7 @@ namespace GitHub.Unity { - class GitObjectFactory : IGitObjectFactory + public class GitObjectFactory : IGitObjectFactory { private readonly 
IEnvironment environment; @@ -12,13 +12,13 @@ public GitObjectFactory(IEnvironment environment) this.environment = environment; } - public GitStatusEntry CreateGitStatusEntry(string path, GitFileStatus status, string originalPath = null, bool staged = false) + public GitStatusEntry CreateGitStatusEntry(string path, GitFileStatus indexStatus, GitFileStatus workTreeStatus = GitFileStatus.None, string originalPath = null) { var absolutePath = new NPath(path).MakeAbsolute(); var relativePath = absolutePath.RelativeTo(environment.RepositoryPath); var projectPath = absolutePath.RelativeTo(environment.UnityProjectPath); - return new GitStatusEntry(relativePath, absolutePath, projectPath, status, originalPath?.ToNPath(), staged); + return new GitStatusEntry(relativePath, absolutePath, projectPath, indexStatus, workTreeStatus, originalPath?.ToNPath()); } } } diff --git a/src/GitHub.Api/Git/GitStatusEntry.cs b/src/GitHub.Api/Git/GitStatusEntry.cs index 1421c5554..8ba7c6d58 100644 --- a/src/GitHub.Api/Git/GitStatusEntry.cs +++ b/src/GitHub.Api/Git/GitStatusEntry.cs @@ -11,22 +11,22 @@ public struct GitStatusEntry public string fullPath; public string projectPath; public string originalPath; - public GitFileStatus status; - public bool staged; + public GitFileStatus indexStatus; + public GitFileStatus workTreeStatus; public GitStatusEntry(string path, string fullPath, string projectPath, - GitFileStatus status, - string originalPath = null, bool staged = false) + GitFileStatus indexStatus, GitFileStatus workTreeStatus, + string originalPath = null) { Guard.ArgumentNotNullOrWhiteSpace(path, "path"); Guard.ArgumentNotNullOrWhiteSpace(fullPath, "fullPath"); this.path = path; - this.status = status; + this.indexStatus = indexStatus; + this.workTreeStatus = workTreeStatus; this.fullPath = fullPath; this.projectPath = projectPath; this.originalPath = originalPath; - this.staged = staged; } public override int GetHashCode() @@ -36,8 +36,8 @@ public override int GetHashCode() hash = 
hash * 23 + (fullPath?.GetHashCode() ?? 0); hash = hash * 23 + (projectPath?.GetHashCode() ?? 0); hash = hash * 23 + (originalPath?.GetHashCode() ?? 0); - hash = hash * 23 + status.GetHashCode(); - hash = hash * 23 + staged.GetHashCode(); + hash = hash * 23 + indexStatus.GetHashCode(); + hash = hash * 23 + workTreeStatus.GetHashCode(); return hash; } @@ -55,8 +55,8 @@ public bool Equals(GitStatusEntry other) String.Equals(fullPath, other.fullPath) && String.Equals(projectPath, other.projectPath) && String.Equals(originalPath, other.originalPath) && - status == other.status && - staged == other.staged + indexStatus == other.indexStatus && + workTreeStatus == other.workTreeStatus ; } @@ -79,6 +79,49 @@ public bool Equals(GitStatusEntry other) return !(lhs == rhs); } + public static GitFileStatus ParseStatusMarker(char changeFlag) + { + GitFileStatus status = GitFileStatus.None; + switch (changeFlag) + { + case 'M': + status = GitFileStatus.Modified; + break; + case 'A': + status = GitFileStatus.Added; + break; + case 'D': + status = GitFileStatus.Deleted; + break; + case 'R': + status = GitFileStatus.Renamed; + break; + case 'C': + status = GitFileStatus.Copied; + break; + case 'U': + status = GitFileStatus.Unmerged; + break; + case 'T': + status = GitFileStatus.TypeChange; + break; + case 'X': + status = GitFileStatus.Unknown; + break; + case 'B': + status = GitFileStatus.Broken; + break; + case '?': + status = GitFileStatus.Untracked; + break; + case '!': + status = GitFileStatus.Ignored; + break; + default: break; + } + return status; + } + public string Path => path; public string FullPath => fullPath; @@ -87,13 +130,21 @@ public bool Equals(GitStatusEntry other) public string OriginalPath => originalPath; - public GitFileStatus Status => status; + public GitFileStatus Status => workTreeStatus != GitFileStatus.None ? 
workTreeStatus : indexStatus; + public GitFileStatus IndexStatus => indexStatus; + public GitFileStatus WorkTreeStatus => workTreeStatus; + + public bool Staged => indexStatus != GitFileStatus.None && !Unmerged && !Untracked && !Ignored; + + public bool Unmerged => (indexStatus == workTreeStatus && (indexStatus == GitFileStatus.Added || indexStatus == GitFileStatus.Deleted)) || + indexStatus == GitFileStatus.Unmerged || workTreeStatus == GitFileStatus.Unmerged; - public bool Staged => staged; + public bool Untracked => workTreeStatus == GitFileStatus.Untracked; + public bool Ignored => workTreeStatus == GitFileStatus.Ignored; public override string ToString() { - return $"Path:'{Path}' Status:'{Status}' FullPath:'{FullPath}' ProjectPath:'{ProjectPath}' OriginalPath:'{OriginalPath}' Staged:'{Staged}'"; + return $"Path:'{Path}' Status:'{Status}' FullPath:'{FullPath}' ProjectPath:'{ProjectPath}' OriginalPath:'{OriginalPath}' Staged:'{Staged}' Unmerged:'{Unmerged}' Status:'{IndexStatus}' Status:'{WorkTreeStatus}' "; } } } diff --git a/src/GitHub.Api/Git/IGitObjectFactory.cs b/src/GitHub.Api/Git/IGitObjectFactory.cs index 7d4e42bc9..7458c9403 100644 --- a/src/GitHub.Api/Git/IGitObjectFactory.cs +++ b/src/GitHub.Api/Git/IGitObjectFactory.cs @@ -1,7 +1,7 @@ namespace GitHub.Unity { - interface IGitObjectFactory + public interface IGitObjectFactory { - GitStatusEntry CreateGitStatusEntry(string path, GitFileStatus status, string originalPath = null, bool staged = false); + GitStatusEntry CreateGitStatusEntry(string path, GitFileStatus indexStatus, GitFileStatus workTreeStatus, string originalPath = null); } } diff --git a/src/GitHub.Api/Git/IRepository.cs b/src/GitHub.Api/Git/IRepository.cs index 148015282..7bd579d37 100644 --- a/src/GitHub.Api/Git/IRepository.cs +++ b/src/GitHub.Api/Git/IRepository.cs @@ -21,6 +21,7 @@ public interface IRepository : IEquatable, IDisposable, IBackedByCa ITask RequestLock(NPath file); ITask ReleaseLock(NPath file, bool force); ITask 
DiscardChanges(GitStatusEntry[] discardEntries); + ITask CheckoutVersion(string changeset, IList files); /// /// Gets the name of the repository. @@ -61,8 +62,10 @@ public interface IRepository : IEquatable, IDisposable, IBackedByCa List CurrentLog { get; } bool IsBusy { get; } string CurrentHead { get; } + GitFileLog CurrentFileLog { get; } event Action LogChanged; + event Action FileLogChanged; event Action TrackingStatusChanged; event Action StatusEntriesChanged; event Action CurrentBranchChanged; @@ -78,7 +81,8 @@ public interface IRepository : IEquatable, IDisposable, IBackedByCa ITask DeleteBranch(string branch, bool force); ITask CreateBranch(string branch, string baseBranch); ITask SwitchBranch(string branch); + ITask UpdateFileLog(string path); void Refresh(CacheType cacheType); event Action OnProgress; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Repository.cs b/src/GitHub.Api/Git/Repository.cs index f9d6fe38e..84cd58853 100644 --- a/src/GitHub.Api/Git/Repository.cs +++ b/src/GitHub.Api/Git/Repository.cs @@ -13,7 +13,7 @@ public interface IBackedByCache } [DebuggerDisplay("{DebuggerDisplay,nq}")] - sealed class Repository : IEquatable, IRepository + public class Repository : IEquatable, IRepository { private static ILogging Logger = LogHelper.GetLogger(); @@ -25,8 +25,10 @@ sealed class Repository : IEquatable, IRepository private HashSet cacheInvalidationRequests = new HashSet(); private Dictionary> cacheUpdateEvents; private ProgressReporter progressReporter = new ProgressReporter(); + private string lastFileLog; public event Action LogChanged; + public event Action FileLogChanged; public event Action TrackingStatusChanged; public event Action StatusEntriesChanged; public event Action CurrentBranchChanged; @@ -63,6 +65,7 @@ public Repository(NPath localPath, ICacheContainer container) { CacheType.GitAheadBehind, c => TrackingStatusChanged?.Invoke(c) }, { CacheType.GitLocks, c => LocksChanged?.Invoke(c) }, { CacheType.GitLog, c => 
LogChanged?.Invoke(c) }, + { CacheType.GitFileLog, c => FileLogChanged?.Invoke(c) }, { CacheType.GitStatus, c => StatusEntriesChanged?.Invoke(c) }, { CacheType.GitUser, cacheUpdateEvent => { } }, { CacheType.RepositoryInfo, cacheUpdateEvent => { @@ -91,6 +94,7 @@ public void Initialize(IRepositoryManager theRepositoryManager, ITaskManager the this.repositoryManager.GitStatusUpdated += RepositoryManagerOnGitStatusUpdated; this.repositoryManager.GitAheadBehindStatusUpdated += RepositoryManagerOnGitAheadBehindStatusUpdated; this.repositoryManager.GitLogUpdated += RepositoryManagerOnGitLogUpdated; + this.repositoryManager.GitFileLogUpdated += RepositoryManagerOnGitFileLogUpdated; this.repositoryManager.GitLocksUpdated += RepositoryManagerOnGitLocksUpdated; this.repositoryManager.LocalBranchesUpdated += RepositoryManagerOnLocalBranchesUpdated; this.repositoryManager.RemoteBranchesUpdated += RepositoryManagerOnRemoteBranchesUpdated; @@ -138,11 +142,17 @@ public ITask SetupRemote(string remote, string remoteUrl) public ITask RequestLock(NPath file) => repositoryManager.LockFile(file); public ITask ReleaseLock(NPath file, bool force) => repositoryManager.UnlockFile(file, force); public ITask DiscardChanges(GitStatusEntry[] gitStatusEntry) => repositoryManager.DiscardChanges(gitStatusEntry); + public ITask CheckoutVersion(string changeset, IList files) => repositoryManager.CheckoutVersion(changeset, files); public ITask RemoteAdd(string remote, string url) => repositoryManager.RemoteAdd(remote, url); public ITask RemoteRemove(string remote) => repositoryManager.RemoteRemove(remote); public ITask DeleteBranch(string branch, bool force) => repositoryManager.DeleteBranch(branch, force); public ITask CreateBranch(string branch, string baseBranch) => repositoryManager.CreateBranch(branch, baseBranch); public ITask SwitchBranch(string branch) => repositoryManager.SwitchBranch(branch); + public ITask UpdateFileLog(string path) + { + lastFileLog = path; + return 
repositoryManager.UpdateFileLog(path); + } public void CheckAndRaiseEventsIfCacheNewer(CacheType cacheType, CacheUpdateEvent cacheUpdateEvent) => cacheContainer.CheckAndRaiseEventsIfCacheNewer(cacheType, cacheUpdateEvent); @@ -188,6 +198,12 @@ public void Refresh(CacheType cacheType) { var cache = cacheContainer.GetCache(cacheType); cache.InvalidateData(); + + // take the opportunity to possibly refresh the locks cache, if it has timed out + if (cacheType != CacheType.GitLocks) + { + cacheContainer.GetCache(CacheType.GitLocks).ValidateData(); + } } private void CacheHasBeenInvalidated(CacheType cacheType) @@ -202,20 +218,26 @@ private void CacheHasBeenInvalidated(CacheType cacheType) switch (cacheType) { case CacheType.Branches: - repositoryManager?.UpdateBranches().Start(); + repositoryManager?.UpdateBranches().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); break; case CacheType.GitLog: - repositoryManager?.UpdateGitLog().Start(); + repositoryManager?.UpdateGitLog().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); + break; + + case CacheType.GitFileLog: + repositoryManager?.UpdateFileLog(lastFileLog).Catch(ex => InvalidationFailed(ex, cacheType)).Start(); break; case CacheType.GitAheadBehind: - repositoryManager?.UpdateGitAheadBehindStatus().Start(); + repositoryManager?.UpdateGitAheadBehindStatus().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); break; case CacheType.GitLocks: if (CurrentRemote != null) - repositoryManager?.UpdateLocks().Start(); + { + repositoryManager?.UpdateLocks().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); + } break; case CacheType.GitUser: @@ -223,11 +245,11 @@ private void CacheHasBeenInvalidated(CacheType cacheType) break; case CacheType.RepositoryInfo: - repositoryManager?.UpdateRepositoryInfo().Start(); + repositoryManager?.UpdateRepositoryInfo().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); break; case CacheType.GitStatus: - repositoryManager?.UpdateGitStatus().Start(); + 
repositoryManager?.UpdateGitStatus().Catch(ex => InvalidationFailed(ex, cacheType)).Start(); break; default: @@ -235,20 +257,29 @@ private void CacheHasBeenInvalidated(CacheType cacheType) } } + private bool InvalidationFailed(Exception ex, CacheType cacheType) + { + Logger.Warning(ex, "Error invalidating {0}", cacheType); + var managedCache = cacheContainer.GetCache(cacheType); + managedCache.ResetInvalidation(); + return false; + } + + private void RepositoryManagerOnCurrentBranchUpdated(ConfigBranch? branch, ConfigRemote? remote, string head) { taskManager.RunInUI(() => { var data = new RepositoryInfoCacheData(); data.CurrentConfigBranch = branch; - data.CurrentGitBranch = branch.HasValue ? (GitBranch?)GetLocalGitBranch(branch.Value.name, branch.Value) : null; + data.CurrentGitBranch = branch.HasValue ? (GitBranch?)GetLocalGitBranch(branch.Value) : null; data.CurrentConfigRemote = remote; data.CurrentGitRemote = remote.HasValue ? (GitRemote?)GetGitRemote(remote.Value) : null; data.CurrentHead = head; name = null; cloneUrl = null; cacheContainer.RepositoryInfoCache.UpdateData(data); - + // force refresh of the Name and CloneUrl propertys var n = Name; }); @@ -278,6 +309,11 @@ private void RepositoryManagerOnGitLogUpdated(List gitLogEntries) taskManager.RunInUI(() => cacheContainer.GitLogCache.Log = gitLogEntries); } + private void RepositoryManagerOnGitFileLogUpdated(GitFileLog gitFileLog) + { + taskManager.RunInUI(() => cacheContainer.GitFileLogCache.FileLog = gitFileLog); + } + private void RepositoryManagerOnGitLocksUpdated(List gitLocks) { taskManager.RunInUI(() => cacheContainer.GitLocksCache.GitLocks = gitLocks); @@ -297,15 +333,15 @@ private void RepositoryManagerOnRemoteBranchesUpdated(Dictionary localConfigBranchDictionary) { taskManager.RunInUI(() => { - var gitLocalBranches = localConfigBranchDictionary.Values.Select(x => GetLocalGitBranch(CurrentBranchName, x)).ToArray(); + var gitLocalBranches = localConfigBranchDictionary.Values.Select(x => 
GetLocalGitBranch(x)).ToArray(); cacheContainer.BranchCache.SetLocals(localConfigBranchDictionary, gitLocalBranches); }); } - private static GitBranch GetLocalGitBranch(string currentBranchName, ConfigBranch x) + private static GitBranch GetLocalGitBranch(ConfigBranch x) { var branchName = x.Name; - var trackingName = x.IsTracking ? x.Remote.Value.Name + "/" + branchName : "[None]"; + var trackingName = x.IsTracking ? x.Remote.Value.Name + "/" + branchName : null; return new GitBranch(branchName, trackingName); } @@ -343,6 +379,7 @@ public void Dispose() public string CurrentBranchName => CurrentConfigBranch?.Name; public GitRemote? CurrentRemote => cacheContainer.RepositoryInfoCache.CurrentGitRemote; public List CurrentLog => cacheContainer.GitLogCache.Log; + public GitFileLog CurrentFileLog => cacheContainer.GitFileLogCache.FileLog; public List CurrentLocks => cacheContainer.GitLocksCache.GitLocks; public string CurrentHead => cacheContainer.RepositoryInfoCache.CurrentHead; @@ -417,9 +454,12 @@ public class User : IUser public User(ICacheContainer cacheContainer) { - this.cacheContainer = cacheContainer; - cacheContainer.CacheInvalidated += (type) => { if (type == CacheType.GitUser) GitUserCacheOnCacheInvalidated(); }; - cacheContainer.CacheUpdated += (type, dt) => { if (type == CacheType.GitUser) CacheHasBeenUpdated(dt); }; + if (cacheContainer != null) + { + this.cacheContainer = cacheContainer; + cacheContainer.CacheInvalidated += (type) => { if (type == CacheType.GitUser) GitUserCacheOnCacheInvalidated(); }; + cacheContainer.CacheUpdated += (type, dt) => { if (type == CacheType.GitUser) CacheHasBeenUpdated(dt); }; + } } public void CheckAndRaiseEventsIfCacheNewer(CacheType cacheType, CacheUpdateEvent cacheUpdateEvent) => cacheContainer.CheckAndRaiseEventsIfCacheNewer(CacheType.GitUser, cacheUpdateEvent); @@ -476,6 +516,7 @@ private void UpdateUserAndEmail() } gitClient.GetConfigUserAndEmail() + .Catch(InvalidationFailed) .ThenInUI((success, value) => { if 
(success) @@ -485,7 +526,15 @@ private void UpdateUserAndEmail() } }).Start(); } - + + private bool InvalidationFailed(Exception ex) + { + Logger.Warning(ex, "Error invalidating user cache"); + var managedCache = cacheContainer.GetCache(CacheType.GitUser); + managedCache.ResetInvalidation(); + return false; + } + public string Name { get { return cacheContainer.GitUserCache.Name; } diff --git a/src/GitHub.Api/Git/RepositoryManager.cs b/src/GitHub.Api/Git/RepositoryManager.cs index 63c08ea49..7b2045b43 100644 --- a/src/GitHub.Api/Git/RepositoryManager.cs +++ b/src/GitHub.Api/Git/RepositoryManager.cs @@ -13,6 +13,7 @@ public interface IRepositoryManager : IDisposable event Action GitStatusUpdated; event Action> GitLocksUpdated; event Action> GitLogUpdated; + event Action GitFileLogUpdated; event Action> LocalBranchesUpdated; event Action, Dictionary>> RemoteBranchesUpdated; event Action GitAheadBehindStatusUpdated; @@ -37,12 +38,15 @@ public interface IRepositoryManager : IDisposable ITask LockFile(NPath file); ITask UnlockFile(NPath file, bool force); ITask DiscardChanges(GitStatusEntry[] gitStatusEntries); + ITask CheckoutVersion(string changeset, IList files); ITask UpdateGitLog(); ITask UpdateGitStatus(); ITask UpdateGitAheadBehindStatus(); ITask UpdateLocks(); ITask UpdateRepositoryInfo(); ITask UpdateBranches(); + ITask UpdateFileLog(string path); + int WaitForEvents(); @@ -51,7 +55,7 @@ public interface IRepositoryManager : IDisposable bool IsBusy { get; } } - interface IRepositoryPathConfiguration + public interface IRepositoryPathConfiguration { NPath RepositoryPath { get; } NPath DotGitPath { get; } @@ -60,15 +64,19 @@ interface IRepositoryPathConfiguration NPath DotGitIndex { get; } NPath DotGitHead { get; } NPath DotGitConfig { get; } + NPath WorktreeDotGitPath { get; } + bool IsWorktree { get; } } - class RepositoryPathConfiguration : IRepositoryPathConfiguration + public class RepositoryPathConfiguration : IRepositoryPathConfiguration { public 
RepositoryPathConfiguration(NPath repositoryPath) { RepositoryPath = repositoryPath; + WorktreeDotGitPath = NPath.Default; DotGitPath = repositoryPath.Combine(".git"); + NPath commonPath; if (DotGitPath.FileExists()) { DotGitPath = @@ -76,17 +84,37 @@ public RepositoryPathConfiguration(NPath repositoryPath) .Where(x => x.StartsWith("gitdir:")) .Select(x => x.Substring(7).Trim().ToNPath()) .First(); + if (DotGitPath.Combine("commondir").FileExists()) + { + commonPath = DotGitPath.Combine("commondir").ReadAllLines() + .Select(x => x.Trim().ToNPath()) + .First(); + commonPath = DotGitPath.Combine(commonPath); + + IsWorktree = true; + WorktreeDotGitPath = commonPath; + } + else + { + commonPath = DotGitPath; + } + } + else + { + commonPath = DotGitPath; } - BranchesPath = DotGitPath.Combine("refs", "heads"); - RemotesPath = DotGitPath.Combine("refs", "remotes"); + BranchesPath = commonPath.Combine("refs", "heads"); + RemotesPath = commonPath.Combine("refs", "remotes"); DotGitIndex = DotGitPath.Combine("index"); DotGitHead = DotGitPath.Combine("HEAD"); - DotGitConfig = DotGitPath.Combine("config"); + DotGitConfig = commonPath.Combine("config"); DotGitCommitEditMsg = DotGitPath.Combine("COMMIT_EDITMSG"); } + public bool IsWorktree { get; } public NPath RepositoryPath { get; } + public NPath WorktreeDotGitPath { get; } public NPath DotGitPath { get; } public NPath BranchesPath { get; } public NPath RemotesPath { get; } @@ -96,7 +124,7 @@ public RepositoryPathConfiguration(NPath repositoryPath) public NPath DotGitCommitEditMsg { get; } } - class RepositoryManager : IRepositoryManager + public class RepositoryManager : IRepositoryManager { private readonly IGitConfig config; private readonly IGitClient gitClient; @@ -112,6 +140,7 @@ class RepositoryManager : IRepositoryManager public event Action GitAheadBehindStatusUpdated; public event Action> GitLocksUpdated; public event Action> GitLogUpdated; + public event Action GitFileLogUpdated; public event Action> 
LocalBranchesUpdated; public event Action, Dictionary>> RemoteBranchesUpdated; @@ -288,7 +317,7 @@ public ITask DiscardChanges(GitStatusEntry[] gitStatusEntries) foreach (var gitStatusEntry in gitStatusEntries) { - if (gitStatusEntry.status == GitFileStatus.Added || gitStatusEntry.status == GitFileStatus.Untracked) + if (gitStatusEntry.WorkTreeStatus == GitFileStatus.Added || gitStatusEntry.WorkTreeStatus == GitFileStatus.Untracked) { itemsToDelete.Add(gitStatusEntry.path.ToNPath().MakeAbsolute()); } @@ -317,6 +346,13 @@ public ITask DiscardChanges(GitStatusEntry[] gitStatusEntries) return HookupHandlers(task, true); } + public ITask CheckoutVersion(string changeset, IList files) + { + var task = GitClient.CheckoutVersion(changeset, files) + .Then(() => DataNeedsRefreshing?.Invoke(CacheType.GitStatus)); + return HookupHandlers(task, false); + } + public ITask UpdateGitLog() { var task = GitClient.Log() @@ -330,6 +366,20 @@ public ITask UpdateGitLog() return HookupHandlers(task, false); } + public ITask UpdateFileLog(string path) + { + var task = GitClient.LogFile(path) + .Then((success, logEntries) => + { + if (success) + { + var gitFileLog = new GitFileLog(path, logEntries); + GitFileLogUpdated?.Invoke(gitFileLog); + } + }); + return HookupHandlers(task, false); + } + public ITask UpdateGitStatus() { var task = GitClient.Status() @@ -620,6 +670,7 @@ private void Dispose(bool disposing) GitStatusUpdated = null; GitAheadBehindStatusUpdated = null; GitLogUpdated = null; + GitFileLogUpdated = null; GitLocksUpdated = null; LocalBranchesUpdated = null; RemoteBranchesUpdated = null; diff --git a/src/GitHub.Api/Git/Tasks/GitAddTask.cs b/src/GitHub.Api/Git/Tasks/GitAddTask.cs index bcb1baf74..a9c277ad3 100644 --- a/src/GitHub.Api/Git/Tasks/GitAddTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitAddTask.cs @@ -1,10 +1,9 @@ -using System; using System.Collections.Generic; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitAddTask : 
ProcessTask + public class GitAddTask : ProcessTask { private const string TaskName = "git add"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitAheadBehindStatusTask.cs b/src/GitHub.Api/Git/Tasks/GitAheadBehindStatusTask.cs index 50635e963..2a18cccb1 100644 --- a/src/GitHub.Api/Git/Tasks/GitAheadBehindStatusTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitAheadBehindStatusTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitAheadBehindStatusTask : ProcessTask + public class GitAheadBehindStatusTask : ProcessTask { private const string TaskName = "git rev-list"; private readonly string arguments; @@ -19,4 +19,4 @@ public GitAheadBehindStatusTask(string gitRef, string otherRef, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Querying status..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitBranchCreateTask.cs b/src/GitHub.Api/Git/Tasks/GitBranchCreateTask.cs index a225c53b5..1ef33ddbd 100644 --- a/src/GitHub.Api/Git/Tasks/GitBranchCreateTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitBranchCreateTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitBranchCreateTask : ProcessTask + public class GitBranchCreateTask : ProcessTask { private const string TaskName = "git branch"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitBranchDeleteTask.cs b/src/GitHub.Api/Git/Tasks/GitBranchDeleteTask.cs index 25b61fe8e..70bc9b596 100644 --- a/src/GitHub.Api/Git/Tasks/GitBranchDeleteTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitBranchDeleteTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitBranchDeleteTask : ProcessTask + public class GitBranchDeleteTask : ProcessTask { private const string TaskName = "git branch -d"; 
private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitCheckoutTask.cs b/src/GitHub.Api/Git/Tasks/GitCheckoutTask.cs index ea2e9f4d3..582b8458d 100644 --- a/src/GitHub.Api/Git/Tasks/GitCheckoutTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitCheckoutTask.cs @@ -1,10 +1,9 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitCheckoutTask : ProcessTask + public class GitCheckoutTask : ProcessTask { private const string TaskName = "git checkout"; private readonly string arguments; @@ -30,8 +29,29 @@ public GitCheckoutTask(CancellationToken token, arguments = "checkout -- ."; } + public GitCheckoutTask( + string changeset, + IEnumerable files, + CancellationToken token, + IOutputProcessor processor = null) : base(token, processor ?? new SimpleOutputProcessor()) + { + Guard.ArgumentNotNull(files, "files"); + Name = TaskName; + + arguments = "checkout "; + arguments += changeset; + arguments += " -- "; + + foreach (var file in files) + { + arguments += " \"" + file.ToNPath().ToString(SlashMode.Forward) + "\""; + } + + Message = "Checking out files at rev " + changeset.Substring(0, 7); + } + public override string ProcessArguments { get { return arguments; } } public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } - public override string Message { get; set; } = "Checking out branch..."; + public override string Message { get; set; } = "Checking out files..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitCommitTask.cs b/src/GitHub.Api/Git/Tasks/GitCommitTask.cs index 75d53de33..e36e4c208 100644 --- a/src/GitHub.Api/Git/Tasks/GitCommitTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitCommitTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitCommitTask : ProcessTask + public class GitCommitTask : 
ProcessTask { private const string TaskName = "git commit"; diff --git a/src/GitHub.Api/Git/Tasks/GitConfigGetTask.cs b/src/GitHub.Api/Git/Tasks/GitConfigGetTask.cs index 402c000ac..3833d465a 100644 --- a/src/GitHub.Api/Git/Tasks/GitConfigGetTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitConfigGetTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitConfigGetAllTask : ProcessTaskWithListOutput + public class GitConfigGetAllTask : ProcessTaskWithListOutput { private const string TaskName = "git config get"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitConfigListTask.cs b/src/GitHub.Api/Git/Tasks/GitConfigListTask.cs index 1ecb999f6..4935b5864 100644 --- a/src/GitHub.Api/Git/Tasks/GitConfigListTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitConfigListTask.cs @@ -2,9 +2,9 @@ using System.Collections.Generic; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitConfigListTask : ProcessTaskWithListOutput> + public class GitConfigListTask : ProcessTaskWithListOutput> { private const string TaskName = "git config list"; private readonly string arguments; @@ -30,4 +30,4 @@ public GitConfigListTask(GitConfigSource configSource, CancellationToken token, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Reading configuration..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitConfigSetTask.cs b/src/GitHub.Api/Git/Tasks/GitConfigSetTask.cs index d08ded87d..369f5150c 100644 --- a/src/GitHub.Api/Git/Tasks/GitConfigSetTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitConfigSetTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitConfigSetTask : ProcessTask + public class GitConfigSetTask : ProcessTask { private readonly string arguments; @@ -25,4 +25,4 @@ public 
GitConfigSetTask(string key, string value, GitConfigSource configSource, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Writing configuration..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitConfigUnSetTask.cs b/src/GitHub.Api/Git/Tasks/GitConfigUnSetTask.cs new file mode 100644 index 000000000..808e97193 --- /dev/null +++ b/src/GitHub.Api/Git/Tasks/GitConfigUnSetTask.cs @@ -0,0 +1,28 @@ +using System; +using System.Threading; + +namespace GitHub.Unity.Git.Tasks +{ + public class GitConfigUnSetTask : ProcessTask + { + private readonly string arguments; + + public GitConfigUnSetTask(string key, GitConfigSource configSource, + CancellationToken token, IOutputProcessor processor = null) + : base(token, processor ?? new SimpleOutputProcessor()) + { + var source = ""; + source += + configSource == GitConfigSource.NonSpecified ? "--unset" : + configSource == GitConfigSource.Local ? "--local --unset" : + configSource == GitConfigSource.User ? 
"--global --unset" : + "--system --unset"; + arguments = String.Format("config {0} {1}", source, key); + Name = String.Format("config {0} {1}", source, key); + } + + public override string ProcessArguments { get { return arguments; } } + public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } + public override string Message { get; set; } = "Writing configuration..."; + } +} diff --git a/src/GitHub.Api/Git/Tasks/GitCountObjectsTask.cs b/src/GitHub.Api/Git/Tasks/GitCountObjectsTask.cs index 6489e8574..c8e8b4ae8 100644 --- a/src/GitHub.Api/Git/Tasks/GitCountObjectsTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitCountObjectsTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitCountObjectsTask : ProcessTask + public class GitCountObjectsTask : ProcessTask { private const string TaskName = "git count-objects"; diff --git a/src/GitHub.Api/Git/Tasks/GitFetchTask.cs b/src/GitHub.Api/Git/Tasks/GitFetchTask.cs index 39a6421bb..445758422 100644 --- a/src/GitHub.Api/Git/Tasks/GitFetchTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitFetchTask.cs @@ -2,9 +2,9 @@ using System.Collections.Generic; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitFetchTask : ProcessTask + public class GitFetchTask : ProcessTask { private const string TaskName = "git fetch"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitInitTask.cs b/src/GitHub.Api/Git/Tasks/GitInitTask.cs index 679f807df..e47f7b0ab 100644 --- a/src/GitHub.Api/Git/Tasks/GitInitTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitInitTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitInitTask : ProcessTask + public class GitInitTask : ProcessTask { private const string TaskName = "git init"; diff --git a/src/GitHub.Api/Git/Tasks/GitLfsInstallTask.cs b/src/GitHub.Api/Git/Tasks/GitLfsInstallTask.cs index 68f5f9d19..6cf4f89c6 
100644 --- a/src/GitHub.Api/Git/Tasks/GitLfsInstallTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitLfsInstallTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitLfsInstallTask : ProcessTask + public class GitLfsInstallTask : ProcessTask { private const string TaskName = "git lsf install"; @@ -16,4 +16,4 @@ public GitLfsInstallTask(CancellationToken token, IOutputProcessor proce public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Initializing LFS..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitLfsVersionTask.cs b/src/GitHub.Api/Git/Tasks/GitLfsVersionTask.cs index 444dcf972..dad8f566c 100644 --- a/src/GitHub.Api/Git/Tasks/GitLfsVersionTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitLfsVersionTask.cs @@ -1,9 +1,8 @@ -using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitLfsVersionTask : ProcessTask + public class GitLfsVersionTask : ProcessTask { private const string TaskName = "git lfs version"; @@ -17,4 +16,4 @@ public GitLfsVersionTask(CancellationToken token, IOutputProcessor p public override TaskAffinity Affinity { get { return TaskAffinity.Concurrent; } } public override string Message { get; set; } = "Reading LFS version..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitListBranchesTask.cs b/src/GitHub.Api/Git/Tasks/GitListBranchesTask.cs index afa0c94e0..58c1aae56 100644 --- a/src/GitHub.Api/Git/Tasks/GitListBranchesTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitListBranchesTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitListLocalBranchesTask : ProcessTaskWithListOutput + public class GitListLocalBranchesTask : ProcessTaskWithListOutput { private const string TaskName = "git list local branches"; private const string Arguments = "branch -vv"; 
diff --git a/src/GitHub.Api/Git/Tasks/GitListLocksTask.cs b/src/GitHub.Api/Git/Tasks/GitListLocksTask.cs index 134cb78b2..6aadd5198 100644 --- a/src/GitHub.Api/Git/Tasks/GitListLocksTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitListLocksTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitListLocksTask : ProcessTaskWithListOutput + public class GitListLocksTask : ProcessTaskWithListOutput { private const string TaskName = "git lfs locks"; private readonly string args; diff --git a/src/GitHub.Api/Git/Tasks/GitLockTask.cs b/src/GitHub.Api/Git/Tasks/GitLockTask.cs index e77b371ce..c51004553 100644 --- a/src/GitHub.Api/Git/Tasks/GitLockTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitLockTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitLockTask : ProcessTask + public class GitLockTask : ProcessTask { private const string TaskName = "git lfs lock"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitLogTask.cs b/src/GitHub.Api/Git/Tasks/GitLogTask.cs index 955521a61..ace25072a 100644 --- a/src/GitHub.Api/Git/Tasks/GitLogTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitLogTask.cs @@ -1,21 +1,54 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitLogTask : ProcessTaskWithListOutput + public class GitLogTask : ProcessTaskWithListOutput { private const string TaskName = "git log"; + private const string baseArguments = @"-c i18n.logoutputencoding=utf8 -c core.quotepath=false log --pretty=format:""%H%n%P%n%aN%n%aE%n%aI%n%cN%n%cE%n%cI%n%B---GHUBODYEND---"" --name-status"; + private readonly string arguments; public GitLogTask(IGitObjectFactory gitObjectFactory, + CancellationToken token, + BaseOutputListProcessor processor = null) + : this(0, gitObjectFactory, token, processor) + { + } + + public GitLogTask(string file, + IGitObjectFactory gitObjectFactory, + CancellationToken 
token, BaseOutputListProcessor processor = null) + : this(file, 0, gitObjectFactory, token, processor) + { + } + + public GitLogTask(int numberOfCommits, IGitObjectFactory gitObjectFactory, + CancellationToken token, + BaseOutputListProcessor processor = null) + : base(token, processor ?? new LogEntryOutputProcessor(gitObjectFactory)) + { + Name = TaskName; + arguments = baseArguments; + if (numberOfCommits > 0) + arguments += " -n " + numberOfCommits; + } + + public GitLogTask(string file, int numberOfCommits, + IGitObjectFactory gitObjectFactory, CancellationToken token, BaseOutputListProcessor processor = null) : base(token, processor ?? new LogEntryOutputProcessor(gitObjectFactory)) { Name = TaskName; + arguments = baseArguments; + if (numberOfCommits > 0) + arguments += " -n " + numberOfCommits; + arguments += " -- "; + arguments += " \"" + file + "\""; } public override string ProcessArguments { - get { return @"-c i18n.logoutputencoding=utf8 -c core.quotepath=false log --pretty=format:""%H%n%P%n%aN%n%aE%n%aI%n%cN%n%cE%n%cI%n%B---GHUBODYEND---"" --name-status"; } + get { return arguments; } } public override string Message { get; set; } = "Loading the history..."; } diff --git a/src/GitHub.Api/Git/Tasks/GitPullTask.cs b/src/GitHub.Api/Git/Tasks/GitPullTask.cs index 64006d950..768c4fc27 100644 --- a/src/GitHub.Api/Git/Tasks/GitPullTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitPullTask.cs @@ -2,9 +2,9 @@ using System.Text; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitPullTask : ProcessTask + public class GitPullTask : ProcessTask { private const string TaskName = "git pull"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitPushTask.cs b/src/GitHub.Api/Git/Tasks/GitPushTask.cs index 7e786c940..4eb533b61 100644 --- a/src/GitHub.Api/Git/Tasks/GitPushTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitPushTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace 
GitHub.Unity.Git.Tasks { - class GitPushTask : ProcessTask + public class GitPushTask : ProcessTask { private const string TaskName = "git push"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitRemoteAddTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoteAddTask.cs index 5512c2b28..375793699 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoteAddTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoteAddTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoteAddTask : ProcessTask + public class GitRemoteAddTask : ProcessTask { private const string TaskName = "git remote add"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitRemoteBranchDeleteTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoteBranchDeleteTask.cs index 92c91f3a7..88899a928 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoteBranchDeleteTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoteBranchDeleteTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoteBranchDeleteTask : ProcessTask + public class GitRemoteBranchDeleteTask : ProcessTask { private const string TaskName = "git push --delete"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitRemoteChangeTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoteChangeTask.cs index 3428fdfa9..3b2ff7a2a 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoteChangeTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoteChangeTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoteChangeTask : ProcessTask + public class GitRemoteChangeTask : ProcessTask { private const string TaskName = "git remote set-url"; private readonly string arguments; @@ -23,4 +23,4 @@ public GitRemoteChangeTask(string remote, string url, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } 
public override string Message { get; set; } = "Switching remotes..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitRemoteListTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoteListTask.cs index 6a25561ec..f8eb75234 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoteListTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoteListTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoteListTask : ProcessTaskWithListOutput + public class GitRemoteListTask : ProcessTaskWithListOutput { private const string TaskName = "git remote"; diff --git a/src/GitHub.Api/Git/Tasks/GitRemoteRemoveTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoteRemoveTask.cs index c5a91cb56..2bfc4c1a5 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoteRemoveTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoteRemoveTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoteRemoveTask : ProcessTask + public class GitRemoteRemoveTask : ProcessTask { private const string TaskName = "git remote rm"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitRemoveFromIndexTask.cs b/src/GitHub.Api/Git/Tasks/GitRemoveFromIndexTask.cs index 899681004..391d884b0 100644 --- a/src/GitHub.Api/Git/Tasks/GitRemoveFromIndexTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRemoveFromIndexTask.cs @@ -1,9 +1,9 @@ using System.Collections.Generic; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRemoveFromIndexTask : ProcessTask + public class GitRemoveFromIndexTask : ProcessTask { private const string TaskName = "git reset HEAD"; private readonly string arguments; @@ -28,4 +28,4 @@ public GitRemoveFromIndexTask(IEnumerable files, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Unstaging files..."; } -} \ No newline at end of file +} diff 
--git a/src/GitHub.Api/Git/Tasks/GitRevertTask.cs b/src/GitHub.Api/Git/Tasks/GitRevertTask.cs index f281c60ba..b677546b5 100644 --- a/src/GitHub.Api/Git/Tasks/GitRevertTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitRevertTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitRevertTask : ProcessTask + public class GitRevertTask : ProcessTask { private const string TaskName = "git revert"; private readonly string arguments; @@ -20,4 +20,4 @@ public GitRevertTask(string changeset, public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Reverting commit..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/Tasks/GitStatusTask.cs b/src/GitHub.Api/Git/Tasks/GitStatusTask.cs index 3456fad67..2acf8d26a 100644 --- a/src/GitHub.Api/Git/Tasks/GitStatusTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitStatusTask.cs @@ -1,8 +1,8 @@ using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitStatusTask : ProcessTask + public class GitStatusTask : ProcessTask { private const string TaskName = "git status"; @@ -15,7 +15,7 @@ public GitStatusTask(IGitObjectFactory gitObjectFactory, public override string ProcessArguments { - get { return "-c i18n.logoutputencoding=utf8 -c core.quotepath=false status -b -u --porcelain"; } + get { return "-c i18n.logoutputencoding=utf8 -c core.quotepath=false --no-optional-locks status -b -u --porcelain"; } } public override TaskAffinity Affinity { get { return TaskAffinity.Exclusive; } } public override string Message { get; set; } = "Listing changed files..."; diff --git a/src/GitHub.Api/Git/Tasks/GitSwitchBranchesTask.cs b/src/GitHub.Api/Git/Tasks/GitSwitchBranchesTask.cs index d234f6b0b..7e4f4e283 100644 --- a/src/GitHub.Api/Git/Tasks/GitSwitchBranchesTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitSwitchBranchesTask.cs @@ -1,9 +1,9 @@ using System; using System.Threading; 
-namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitSwitchBranchesTask : ProcessTask + public class GitSwitchBranchesTask : ProcessTask { private const string TaskName = "git checkout"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitUnlockTask.cs b/src/GitHub.Api/Git/Tasks/GitUnlockTask.cs index 94ac54e40..ff534cd76 100644 --- a/src/GitHub.Api/Git/Tasks/GitUnlockTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitUnlockTask.cs @@ -1,9 +1,9 @@ using System.Text; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitUnlockTask : ProcessTask + public class GitUnlockTask : ProcessTask { private const string TaskName = "git lfs unlock"; private readonly string arguments; diff --git a/src/GitHub.Api/Git/Tasks/GitVersionTask.cs b/src/GitHub.Api/Git/Tasks/GitVersionTask.cs index 4774cd84e..d4d01493a 100644 --- a/src/GitHub.Api/Git/Tasks/GitVersionTask.cs +++ b/src/GitHub.Api/Git/Tasks/GitVersionTask.cs @@ -1,9 +1,8 @@ -using System; using System.Threading; -namespace GitHub.Unity +namespace GitHub.Unity.Git.Tasks { - class GitVersionTask : ProcessTask + public class GitVersionTask : ProcessTask { private const string TaskName = "git --version"; @@ -17,4 +16,4 @@ public GitVersionTask(CancellationToken token, IOutputProcessor proc public override TaskAffinity Affinity { get { return TaskAffinity.Concurrent; } } public override string Message { get; set; } = "Reading git version..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Git/TreeData.cs b/src/GitHub.Api/Git/TreeData.cs index ac9e67b2c..997cbffed 100644 --- a/src/GitHub.Api/Git/TreeData.cs +++ b/src/GitHub.Api/Git/TreeData.cs @@ -7,6 +7,7 @@ public interface ITreeData { string Path { get; } bool IsActive { get; } + bool IsChecked { get; } } [Serializable] @@ -64,6 +65,7 @@ public bool Equals(GitBranchTreeData other) public string Path => GitBranch.Name; public bool IsActive => isActive; + public bool IsChecked => false; } 
[Serializable] @@ -73,11 +75,13 @@ public struct GitStatusEntryTreeData : ITreeData public GitStatusEntry gitStatusEntry; public bool isLocked; + public bool isChecked; public GitStatusEntryTreeData(GitStatusEntry gitStatusEntry, bool isLocked = false) { this.isLocked = isLocked; this.gitStatusEntry = gitStatusEntry; + isChecked = gitStatusEntry.Staged; } public override int GetHashCode() @@ -127,5 +131,6 @@ public bool Equals(GitStatusEntryTreeData other) public GitStatusEntry GitStatusEntry => gitStatusEntry; public GitFileStatus FileStatus => gitStatusEntry.Status; public bool IsLocked => isLocked; + public bool IsChecked => isChecked; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/GitHub.Api.45.csproj b/src/GitHub.Api/GitHub.Api.45.csproj new file mode 100644 index 000000000..9833d34f1 --- /dev/null +++ b/src/GitHub.Api/GitHub.Api.45.csproj @@ -0,0 +1,327 @@ + + + + + Debug + AnyCPU + {B389ADAF-62CC-486E-85B4-2D8B078DF76B} + Library + Properties + GitHub.Unity + GitHub.Api.45 + v4.5 + 512 + + 6 + + + ..\UnityExtension\Assets\Editor\build\ + + + + true + full + false + DEBUG;TRACE;$(BuildDefs);NET_4_6 + prompt + 4 + false + false + true + + + pdbonly + true + TRACE;$(BuildDefs);NET_4_6 + prompt + 4 + Release + false + false + true + + + true + full + false + TRACE;DEBUG;DEVELOPER_BUILD;$(BuildDefs);NET_4_6 + prompt + 4 + false + false + true + ..\..\common\codeanalysis-debug.ruleset + + + Debug + + + + $(SolutionDir)lib\Mono.Posix.dll + + + $(SolutionDir)\lib\sfw\sfw.net.dll + True + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + True + True + Localization.resx + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
Metrics\MetricsService.cs + + + Properties\ApplicationInfo_Local.cs + + + + + + + Properties\ApplicationInfo_Local.cs-example + + + Properties\ApplicationInfo_Local.cs + + + + + + + {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} + GitHub.Logging + + + + + + + + + + + + + + + + + + PublicResXFileCodeGenerator + Localization.Designer.cs + Designer + + + + + + \ No newline at end of file diff --git a/src/GitHub.Api/GitHub.Api.45.v3.ncrunchproject b/src/GitHub.Api/GitHub.Api.45.v3.ncrunchproject new file mode 100644 index 000000000..ebf9681fd --- /dev/null +++ b/src/GitHub.Api/GitHub.Api.45.v3.ncrunchproject @@ -0,0 +1,8 @@ + + + + ..\..\script\lib\Managed\UnityEditor.dll + ..\..\script\lib\Managed\UnityEngine.dll + + + \ No newline at end of file diff --git a/src/GitHub.Api/GitHub.Api.csproj b/src/GitHub.Api/GitHub.Api.csproj index 4b2137e88..4b2c8d7e5 100644 --- a/src/GitHub.Api/GitHub.Api.csproj +++ b/src/GitHub.Api/GitHub.Api.csproj @@ -35,7 +35,7 @@ TRACE;$(BuildDefs) prompt 4 - Release + Release false false true @@ -60,9 +60,6 @@ $(SolutionDir)\packages\AsyncBridge.Net35.0.2.3333.0\lib\net35-Client\AsyncBridge.Net35.dll True - - $(SolutionDir)lib\ICSharpCode.SharpZipLib.dll - $(SolutionDir)lib\Mono.Posix.dll @@ -80,20 +77,24 @@ $(SolutionDir).\packages\TaskParallelLibrary.1.0.3333.0\lib\Net35\System.Threading.dll True + + + + @@ -107,11 +108,12 @@ + - + @@ -136,6 +138,21 @@ + + + + + + + + + + + + + + + @@ -163,15 +180,17 @@ + + + + - - @@ -181,8 +200,6 @@ True Localization.resx - - @@ -236,6 +253,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -295,8 +338,11 @@ Other similar extension points exist, see Microsoft.Common.targets. 
+ --> + + + - --> \ No newline at end of file diff --git a/src/GitHub.Api/GitHub.Api.v3.ncrunchproject b/src/GitHub.Api/GitHub.Api.v3.ncrunchproject index 2f25da2c4..8fa6360df 100644 --- a/src/GitHub.Api/GitHub.Api.v3.ncrunchproject +++ b/src/GitHub.Api/GitHub.Api.v3.ncrunchproject @@ -1,8 +1,8 @@  - ..\..\script\lib\UnityEditor.dll - ..\..\script\lib\UnityEngine.dll + ..\..\script\lib\Managed\UnityEngine.dll + ..\..\script\lib\Managed\UnityEditor.dll True diff --git a/src/GitHub.Api/Helpers/Constants.cs b/src/GitHub.Api/Helpers/Constants.cs index 4380a57c9..1d0da6a86 100644 --- a/src/GitHub.Api/Helpers/Constants.cs +++ b/src/GitHub.Api/Helpers/Constants.cs @@ -14,10 +14,22 @@ static class Constants public const string Iso8601Format = @"yyyy-MM-dd\THH\:mm\:ss.fffzzz"; public const string Iso8601FormatZ = @"yyyy-MM-dd\THH\:mm\:ss\Z"; public static readonly string[] Iso8601Formats = { + Iso8601Format, Iso8601FormatZ, @"yyyy-MM-dd\THH\:mm\:ss.fffffffzzz", - Iso8601Format, + @"yyyy-MM-dd\THH\:mm\:ss.ffffffzzz", + @"yyyy-MM-dd\THH\:mm\:ss.fffffzzz", + @"yyyy-MM-dd\THH\:mm\:ss.ffffzzz", + @"yyyy-MM-dd\THH\:mm\:ss.ffzzz", + @"yyyy-MM-dd\THH\:mm\:ss.fzzz", @"yyyy-MM-dd\THH\:mm\:sszzz", + @"yyyy-MM-dd\THH\:mm\:ss.fffffff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.ffffff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.fffff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.ffff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.fff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.ff\Z", + @"yyyy-MM-dd\THH\:mm\:ss.f\Z", }; public const DateTimeStyles DateTimeStyle = DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal; public const string SkipVersionKey = "SkipVersion"; diff --git a/src/GitHub.Api/Helpers/Progress.cs b/src/GitHub.Api/Helpers/Progress.cs index c7ae0a003..312cd43da 100644 --- a/src/GitHub.Api/Helpers/Progress.cs +++ b/src/GitHub.Api/Helpers/Progress.cs @@ -72,8 +72,7 @@ public void UpdateProgress(long value, long total, string message = null) float fTotal = Total; float fValue = Value; Percentage = fValue / fTotal; - float delta = 
fValue / fTotal - previousValue / fTotal; - delta = delta * 100f / fTotal; + var delta = (fValue / fTotal - previousValue / fTotal) * 100f; if (Value != previousValue && (fValue == 0f || delta > 1f || fValue == fTotal)) { // signal progress in 1% increments or if we don't know what the total is diff --git a/src/GitHub.Api/Helpers/SimpleJson.cs b/src/GitHub.Api/Helpers/SimpleJson.cs index 2a0dafd78..1ae5e3ba7 100644 --- a/src/GitHub.Api/Helpers/SimpleJson.cs +++ b/src/GitHub.Api/Helpers/SimpleJson.cs @@ -1250,17 +1250,7 @@ class PocoJsonSerializerStrategy : IJsonSerializerStrategy internal static readonly Type[] EmptyTypes = new Type[0]; internal static readonly Type[] ArrayConstructorParameterTypes = new Type[] { typeof(int) }; - private static readonly string[] Iso8601Format = new string[] - { - @"yyyy-MM-dd\THH\:mm\:sszzz", - @"yyyy-MM-dd\THH\:mm\:ss.fffffffzzz", - @"yyyy-MM-dd\THH\:mm\:ss.fffzzz", - @"yyyy-MM-dd\THH\:mm\:ss\Z", - @"yyyy-MM-dd\THH:mm:ss.fffffffzzz", - @"yyyy-MM-dd\THH:mm:ss.fffzzz", - @"yyyy-MM-dd\THH:mm:sszzz", - @"yyyy-MM-dd\THH:mm:ss\Z", - }; + private static readonly string[] Iso8601Format = Constants.Iso8601Formats; public PocoJsonSerializerStrategy() { @@ -2224,7 +2214,7 @@ private static string ToJsonPropertyName(string propertyName) return propertyName.Substring(0, i).ToLowerInvariant() + propertyName.Substring(i); } - class JsonSerializationStrategy : PocoJsonSerializerStrategy + public class JsonSerializationStrategy : PocoJsonSerializerStrategy { private bool toLowerCase = false; private bool onlyPublic = true; diff --git a/src/GitHub.Api/Helpers/TaskHelpers.cs b/src/GitHub.Api/Helpers/TaskHelpers.cs index 33481c029..fb5695329 100644 --- a/src/GitHub.Api/Helpers/TaskHelpers.cs +++ b/src/GitHub.Api/Helpers/TaskHelpers.cs @@ -8,14 +8,18 @@ static class TaskHelpers { public static Task GetCompletedTask(T result) { +#if NET_4_6 + return Task.FromResult(result); +#else return TaskEx.FromResult(result); +#endif } public static Task 
ToTask(this Exception exception) { - TaskCompletionSource completionSource = new TaskCompletionSource(); - completionSource.TrySetException(exception); - return completionSource.Task; + TaskCompletionSource completionSource = new TaskCompletionSource(); + completionSource.TrySetException(exception); + return completionSource.Task; } } diff --git a/src/GitHub.Api/IO/NiceIO.cs b/src/GitHub.Api/IO/NiceIO.cs index bf008660c..c8605de6a 100644 --- a/src/GitHub.Api/IO/NiceIO.cs +++ b/src/GitHub.Api/IO/NiceIO.cs @@ -113,7 +113,7 @@ private static bool HasNonDotDotLastElement(List stack) private static string ParseDriveLetter(string path, out string driveLetter) { - if (path.Length >= 2 && path[1] == ':') + if (path.Length >= 3 && path[1] == ':' && (path[2] == '/' || path[2] == '\\')) { driveLetter = path[0].ToString(); return path.Substring(2); diff --git a/src/GitHub.Api/Installer/CopyHelper.cs b/src/GitHub.Api/Installer/CopyHelper.cs new file mode 100644 index 000000000..98faecb62 --- /dev/null +++ b/src/GitHub.Api/Installer/CopyHelper.cs @@ -0,0 +1,55 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using GitHub.Logging; + +namespace GitHub.Unity +{ + public static class CopyHelper + { + private static readonly ILogging Logger = LogHelper.GetLogger(typeof(CopyHelper)); + + public static void Copy(NPath fromPath, NPath toPath) + { + Logger.Trace("Copying from {0} to {1}", fromPath, toPath); + + try + { + CopyFolder(fromPath, toPath); + } + catch (Exception ex1) + { + Logger.Warning(ex1, "Error copying."); + + try + { + CopyFolderContents(fromPath, toPath); + } + catch (Exception ex2) + { + Logger.Error(ex2, "Error copying contents."); + throw; + } + } + finally + { + fromPath.DeleteIfExists(); + } + } + public static void CopyFolder(NPath fromPath, NPath toPath) + { + Logger.Trace("CopyFolder from {0} to {1}", fromPath, toPath); + toPath.DeleteIfExists(); + toPath.EnsureParentDirectoryExists(); + fromPath.Move(toPath); + } + + 
public static void CopyFolderContents(NPath fromPath, NPath toPath) + { + Logger.Trace("CopyFolderContents from {0} to {1}", fromPath, toPath); + toPath.DeleteContents(); + fromPath.MoveFiles(toPath, true); + } + } +} diff --git a/src/GitHub.Api/Installer/GitInstaller.cs b/src/GitHub.Api/Installer/GitInstaller.cs index 32a158504..d8dd7594b 100644 --- a/src/GitHub.Api/Installer/GitInstaller.cs +++ b/src/GitHub.Api/Installer/GitInstaller.cs @@ -1,6 +1,7 @@ using System; using System.Threading; using GitHub.Logging; +using GitHub.Unity.Git.Tasks; namespace GitHub.Unity { @@ -55,8 +56,14 @@ public GitInstallationState SetupGitIfNeeded(GitInstallationState state = null) } state = VerifyZipFiles(state); + // on developer builds, prefer local zips over downloading +#if DEVELOPER_BUILD + state = GrabZipFromResourcesIfNeeded(state); + state = GetZipsIfNeeded(state); +#else state = GetZipsIfNeeded(state); state = GrabZipFromResourcesIfNeeded(state); +#endif state = ExtractGit(state); // if installing from zip failed (internet down maybe?), try to find a usable system git @@ -295,15 +302,16 @@ private GitInstallationState ExtractGit(GitInstallationState state) return true; }); unzipTask.Progress(p => Progress.UpdateProgress(40 + (long)(20 * p.Percentage), 100, unzipTask.Message)); - var path = unzipTask.RunSynchronously(); + unzipTask.RunSynchronously(); var target = state.GitInstallationPath; if (unzipTask.Successful) { - var source = path; - target.DeleteIfExists(); - target.EnsureParentDirectoryExists(); - source.Move(target); + Logger.Trace("Moving Git source:{0} target:{1}", gitExtractPath.ToString(), target.ToString()); + + CopyHelper.Copy(gitExtractPath, target); + state.GitIsValid = true; + state.IsCustomGitPath = state.GitExecutablePath != installDetails.GitExecutablePath; } } @@ -320,14 +328,14 @@ private GitInstallationState ExtractGit(GitInstallationState state) return true; }); unzipTask.Progress(p => Progress.UpdateProgress(60 + (long)(20 * p.Percentage), 100, 
unzipTask.Message)); - var path = unzipTask.RunSynchronously(); + unzipTask.RunSynchronously(); var target = state.GitLfsInstallationPath; if (unzipTask.Successful) { - var source = path; - target.DeleteIfExists(); - target.EnsureParentDirectoryExists(); - source.Move(target); + Logger.Trace("Moving GitLFS source:{0} target:{1}", gitLfsExtractPath.ToString(), target.ToString()); + + CopyHelper.Copy(gitLfsExtractPath, target); + state.GitLfsIsValid = true; } } diff --git a/src/GitHub.Api/Installer/OctorunInstaller.cs b/src/GitHub.Api/Installer/OctorunInstaller.cs index 497466be1..11ec351a2 100644 --- a/src/GitHub.Api/Installer/OctorunInstaller.cs +++ b/src/GitHub.Api/Installer/OctorunInstaller.cs @@ -33,12 +33,13 @@ public NPath SetupOctorunIfNeeded() GrabZipFromResources(); - var tempZipExtractPath = NPath.CreateTempDirectory("octorun_extract_archive_path"); + var extractPath = NPath.CreateTempDirectory("octorun_extract_archive_path"); var unzipTask = new UnzipTask(taskManager.Token, installDetails.ZipFile, - tempZipExtractPath, sharpZipLibHelper, + extractPath, sharpZipLibHelper, fileSystem) .Catch(e => { Logger.Error(e, "Error extracting octorun"); return true; }); - var extractPath = unzipTask.RunSynchronously(); + unzipTask.RunSynchronously(); + if (unzipTask.Successful) path = MoveOctorun(extractPath.Combine("octorun")); return path; @@ -52,10 +53,11 @@ private NPath GrabZipFromResources() private NPath MoveOctorun(NPath fromPath) { var toPath = installDetails.InstallationPath; - toPath.DeleteIfExists(); - toPath.EnsureParentDirectoryExists(); - fromPath.Move(toPath); - fromPath.Parent.Delete(); + + Logger.Trace("MoveOctorun fromPath: {0} toPath:{1}", fromPath.ToString(), toPath.ToString()); + + CopyHelper.Copy(fromPath, toPath); + return installDetails.ExecutablePath; } @@ -85,7 +87,7 @@ public class OctorunInstallDetails public const string DefaultZipMd5Url = "http://github-vs.s3.amazonaws.com/unity/octorun/octorun.zip.md5"; public const string DefaultZipUrl 
= "http://github-vs.s3.amazonaws.com/unity/octorun/octorun.zip"; - public const string PackageVersion = "b4b80eb4ac"; + public const string PackageVersion = "902910f48"; private const string PackageName = "octorun"; private const string zipFile = "octorun.zip"; diff --git a/src/GitHub.Api/Installer/ZipHelper.cs b/src/GitHub.Api/Installer/ZipHelper.cs index b01731b01..3d581c5e4 100644 --- a/src/GitHub.Api/Installer/ZipHelper.cs +++ b/src/GitHub.Api/Installer/ZipHelper.cs @@ -1,7 +1,7 @@ using System; using System.IO; using System.Threading; -using ICSharpCode.SharpZipLib.Zip; +using GitHub.ICSharpCode.SharpZipLib.Zip; using GitHub.Logging; using System.Collections.Generic; diff --git a/src/GitHub.Api/Localization.resx b/src/GitHub.Api/Localization.resx index da57ad667..d8c6ef16e 100644 --- a/src/GitHub.Api/Localization.resx +++ b/src/GitHub.Api/Localization.resx @@ -256,7 +256,7 @@ GitHub - ok + OK Cancel @@ -429,4 +429,4 @@ Discard - \ No newline at end of file + diff --git a/src/GitHub.Api/Metrics/UsageModel.cs b/src/GitHub.Api/Metrics/UsageModel.cs index 123146465..91a02c1d2 100644 --- a/src/GitHub.Api/Metrics/UsageModel.cs +++ b/src/GitHub.Api/Metrics/UsageModel.cs @@ -22,6 +22,7 @@ public class Dimensions public string UnityVersion { get; set; } public string Lang { get; set; } public string CurrentLang { get; set; } + public string GitHubUser { get; set; } } public class Measures diff --git a/src/GitHub.Api/Metrics/UsageTracker.cs b/src/GitHub.Api/Metrics/UsageTracker.cs index 08c515b21..0e8bce059 100644 --- a/src/GitHub.Api/Metrics/UsageTracker.cs +++ b/src/GitHub.Api/Metrics/UsageTracker.cs @@ -8,6 +8,13 @@ namespace GitHub.Unity { class UsageTrackerSync : IUsageTracker { + +#if DEVELOPER_BUILD + protected internal const int MetrisReportTimeout = 30; +#else + protected internal const int MetrisReportTimeout = 3 * 60; +#endif + private static ILogging Logger { get; } = LogHelper.GetLogger(); private static object _lock = new object(); @@ -44,7 +51,7 @@ public 
UsageTrackerSync(ISettings userSettings, IUsageLoader usageLoader, Logger.Trace("userId:{0} instanceId:{1}", userId, instanceId); if (Enabled) - RunTimer(3 * 60); + RunTimer(MetrisReportTimeout); } private void RunTimer(int seconds) @@ -94,6 +101,11 @@ private void SendUsage() return; } + var username = GetUsername(); + if (!String.IsNullOrEmpty(username)) { + extractReports.ForEach(x => x.Dimensions.GitHubUser = username); + } + try { MetricsService.PostUsage(extractReports); @@ -316,6 +328,11 @@ public virtual void UpdateLfsDiskUsage(int kilobytes) } } + protected virtual string GetUsername() + { + return ""; + } + public bool Enabled { get @@ -344,7 +361,9 @@ class UsageTracker : UsageTrackerSync { public UsageTracker(ITaskManager taskManager, IGitClient gitClient, IProcessManager processManager, ISettings userSettings, - IEnvironment environment, string instanceId) + IEnvironment environment, + IKeychain keychain, + string instanceId) : base(userSettings, new UsageLoader(environment.UserCachePath.Combine(Constants.UsageFile)), environment.UnityVersion, instanceId) @@ -353,6 +372,7 @@ public UsageTracker(ITaskManager taskManager, IGitClient gitClient, IProcessMana Environment = environment; GitClient = gitClient; ProcessManager = processManager; + Keychain = keychain; } protected override void CaptureRepoSize() @@ -377,6 +397,18 @@ protected override void CaptureRepoSize() catch {} } + protected override string GetUsername() + { + string username = ""; + try { + var apiClient = new ApiClient(Keychain, ProcessManager, TaskManager, Environment); + var user = apiClient.GetCurrentUser(); + username = user.Login; + } catch { + } + return username; + } + public override void IncrementApplicationMenuMenuItemCommandLine() => TaskManager.Run(base.IncrementApplicationMenuMenuItemCommandLine); public override void IncrementAuthenticationViewButtonAuthentication() => TaskManager.Run(base.IncrementAuthenticationViewButtonAuthentication); public override void 
IncrementBranchesViewButtonCheckoutLocalBranch() => TaskManager.Run(base.IncrementBranchesViewButtonCheckoutLocalBranch); @@ -400,6 +432,7 @@ protected override void CaptureRepoSize() protected IEnvironment Environment { get; } protected IGitClient GitClient { get; } public IProcessManager ProcessManager { get; } + protected IKeychain Keychain { get; } } interface IUsageLoader diff --git a/src/GitHub.Api/OutputProcessors/BranchListOutputProcessor.cs b/src/GitHub.Api/OutputProcessors/BranchListOutputProcessor.cs index 2a46e7474..2b6427f40 100644 --- a/src/GitHub.Api/OutputProcessors/BranchListOutputProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/BranchListOutputProcessor.cs @@ -1,4 +1,5 @@ using System; +using System.Security.AccessControl; using System.Text.RegularExpressions; namespace GitHub.Unity @@ -18,29 +19,34 @@ public override void LineReceived(string line) try { - proc.Matches('*'); + string name; + string trackingName = null; + + if (proc.Matches('*')) + proc.MoveNext(); proc.SkipWhitespace(); - var detached = proc.Matches("(HEAD "); - var name = "detached"; - if (detached) + if (proc.Matches("(HEAD ")) { + name = "detached"; proc.MoveToAfter(')'); } else { name = proc.ReadUntilWhitespace(); } - proc.SkipWhitespace(); - proc.ReadUntilWhitespace(); - var tracking = proc.Matches(trackingBranchRegex); - var trackingName = ""; - if (tracking) + + proc.ReadUntilWhitespaceTrim(); + if (proc.Matches(trackingBranchRegex)) { trackingName = proc.ReadChunk('[', ']'); + var indexOf = trackingName.IndexOf(':'); + if (indexOf != -1) + { + trackingName = trackingName.Substring(0, indexOf); + } } var branch = new GitBranch(name, trackingName); - RaiseOnEntry(branch); } catch(Exception ex) diff --git a/src/GitHub.Api/OutputProcessors/GitCountObjectsProcessor.cs b/src/GitHub.Api/OutputProcessors/GitCountObjectsProcessor.cs index 56b4d96ff..3fe0f31b7 100644 --- a/src/GitHub.Api/OutputProcessors/GitCountObjectsProcessor.cs +++ 
b/src/GitHub.Api/OutputProcessors/GitCountObjectsProcessor.cs @@ -15,9 +15,8 @@ public override void LineReceived(string line) { var proc = new LineParser(line); - proc.ReadUntil(','); - proc.SkipWhitespace(); - var kilobytes = int.Parse(proc.ReadUntilWhitespace()); + proc.MoveToAfter(','); + var kilobytes = int.Parse(proc.ReadUntilWhitespaceTrim()); RaiseOnEntry(kilobytes); } diff --git a/src/GitHub.Api/OutputProcessors/LineProcessor.cs b/src/GitHub.Api/OutputProcessors/LineProcessor.cs index 213717162..8d90ea413 100644 --- a/src/GitHub.Api/OutputProcessors/LineProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/LineProcessor.cs @@ -61,19 +61,30 @@ public void SkipWhitespace() if (IsAtEnd) throw new InvalidOperationException("Reached end of line"); - while (!Char.IsWhiteSpace(line[current]) && current < line.Length) - current++; - while (Char.IsWhiteSpace(line[current]) && current < line.Length) + while (current < line.Length && char.IsWhiteSpace(line[current])) current++; } - public string ReadUntil(char separator) + /// + /// Reads until it finds the separator and returns what it read. 
+ /// + /// + /// If the current character matches the + /// separator and you actually want to read the next match, set this to true (if you're tokenizing, for instance) + /// + public string ReadUntil(char separator, bool skipCurrentIfMatch = false) { if (IsAtEnd) throw new InvalidOperationException("Reached end of line"); - if (line[current] == separator) - current++; + if (Matches(separator)) + { + if (skipCurrentIfMatch) + MoveNext(); + else + return null; + } + var end = line.IndexOf(separator, current); if (end == -1) return null; @@ -82,23 +93,30 @@ public string ReadUntil(char separator) return LastSubstring; } + + public string ReadUntilWhitespaceTrim() + { + SkipWhitespace(); + if (IsAtEnd) + return null; + return ReadUntilWhitespace(); + } + public string ReadUntilWhitespace() { if (IsAtEnd) throw new InvalidOperationException("Reached end of line"); - if (Char.IsWhiteSpace(line[current])) - SkipWhitespace(); + if (char.IsWhiteSpace(line[current])) + return null; + + var end = current; + while (end < line.Length && !char.IsWhiteSpace(line[end])) + end++; + + if (end == current) // current character is a whitespace, read nothing + return null; - int end = line.Length; - for (var i = current; i < end; i++) - { - if (Char.IsWhiteSpace(line[i])) - { - end = i; - break; - } - } LastSubstring = line.Substring(current, end - current); current = end; return LastSubstring; @@ -125,6 +143,30 @@ public string ReadToEnd() return LastSubstring; } + public string Read(int howMany) + { + if (IsAtEnd) + throw new InvalidOperationException("Reached end of line"); + + if (current + howMany > line.Length) + return null; + + LastSubstring = line.Substring(current, howMany); + current += howMany; + return LastSubstring; + } + + public char ReadChar() + { + if (IsAtEnd) + throw new InvalidOperationException("Reached end of line"); + + var ret = line[current]; + LastSubstring = ret.ToString(); + MoveNext(); + return ret; + } + public string ReadUntilLast(string str) { if 
(IsAtEnd) @@ -138,10 +180,10 @@ public string ReadUntilLast(string str) return LastSubstring; } - public bool IsAtEnd { get { return line != null ? line.Length == current : true; } } - public bool IsAtWhitespace { get { return line != null && Char.IsWhiteSpace(line[current]); } } - public bool IsAtDigit { get { return line != null && Char.IsDigit(line[current]); } } - public bool IsAtLetter { get { return line != null && Char.IsLetter(line[current]); } } + public bool IsAtEnd => line == null || line.Length == current; + public bool IsAtWhitespace => line != null && Char.IsWhiteSpace(line[current]); + public bool IsAtDigit => line != null && Char.IsDigit(line[current]); + public bool IsAtLetter => line != null && Char.IsLetter(line[current]); public string LastSubstring { get; private set; } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/OutputProcessors/LogEntryOutputProcessor.cs b/src/GitHub.Api/OutputProcessors/LogEntryOutputProcessor.cs index 4ad7f6560..79d1fdeef 100644 --- a/src/GitHub.Api/OutputProcessors/LogEntryOutputProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/LogEntryOutputProcessor.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.Linq; using System.Text; using System.Text.RegularExpressions; @@ -143,7 +144,6 @@ public override void LineReceived(string line) } summary = line; - descriptionLines.Add(line); phase++; // there's no description so skip it if (oneliner) @@ -190,101 +190,45 @@ public override void LineReceived(string line) var proc = new LineParser(line); string file = null; - GitFileStatus status; + GitFileStatus status = GitFileStatus.None; string originalPath = null; - if (proc.Matches('M')) - { - status = GitFileStatus.Modified; - } - else if (proc.Matches('A')) - { - status = GitFileStatus.Added; - } - else if (proc.Matches('D')) - { - status = GitFileStatus.Deleted; - } - else if (proc.Matches('R')) - { - status = GitFileStatus.Renamed; - } - else if 
(proc.Matches('C')) - { - status = GitFileStatus.Copied; - } - else if (proc.Matches('T')) - { - status = GitFileStatus.TypeChange; - } - else if (proc.Matches('U')) - { - status = GitFileStatus.Unmerged; - } - else if (proc.Matches('X')) - { - status = GitFileStatus.Unknown; - } - else if (proc.Matches('B')) - { - status = GitFileStatus.Broken; - } - else if (String.IsNullOrEmpty(line)) + if (proc.IsAtEnd) { // there's no files on this commit, it's a new one! ReturnGitLogEntry(); return; } else + { + status = GitStatusEntry.ParseStatusMarker(proc.ReadChar()); + } + + if (status == GitFileStatus.None) { HandleUnexpected(line); return; } - switch (status) + proc.ReadUntilWhitespace(); + if (status == GitFileStatus.Copied || status == GitFileStatus.Renamed) { - case GitFileStatus.Modified: - case GitFileStatus.Added: - case GitFileStatus.Deleted: - proc.SkipWhitespace(); - - file = proc.Matches('"') - ? proc.ReadUntil('"') - : proc.ReadToEnd(); - - break; - case GitFileStatus.Renamed: - - proc.SkipWhitespace(); - - originalPath = - proc.Matches('"') - ? proc.ReadUntil('"') - : proc.ReadUntilWhitespace(); - - proc.SkipWhitespace(); - - file = proc.Matches('"') - ? proc.ReadUntil('"') - : proc.ReadToEnd(); - - break; - - default: - proc.SkipWhitespace(); - - file = proc.Matches('"') - ? 
proc.ReadUntil('"') - : proc.ReadUntilWhitespace(); - if (file == null) - { - file = proc.ReadToEnd(); - } - - break; + var files = + proc.ReadToEnd().Trim() + .Split(new char[] { '\t' }, StringSplitOptions.RemoveEmptyEntries) + .Select(s => s.Trim()) + .Select(s => s.Trim('"')) + .ToArray(); + + originalPath = files[0]; + file = files[1]; + } + else + { + file = proc.ReadToEnd().Trim().Trim('"'); } - changes.Add(gitObjectFactory.CreateGitStatusEntry(file, status, originalPath)); + changes.Add(gitObjectFactory.CreateGitStatusEntry(file, status, GitFileStatus.None, originalPath)); break; @@ -313,7 +257,8 @@ private void ReturnGitLogEntry() { PopNewlines(); - var description = string.Join(Environment.NewLine, descriptionLines.ToArray()); + var filteredDescriptionLines = (descriptionLines.Any() && string.IsNullOrEmpty(descriptionLines.First()) ? descriptionLines.Skip(1) : descriptionLines).ToArray(); + var description = string.Join(Environment.NewLine, filteredDescriptionLines); if (time.HasValue) { @@ -347,4 +292,4 @@ private enum ProcessingPhase Files = 10, } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/OutputProcessors/RemoteListOutputProcessor.cs b/src/GitHub.Api/OutputProcessors/RemoteListOutputProcessor.cs index 03f8f9d68..08332bbe6 100644 --- a/src/GitHub.Api/OutputProcessors/RemoteListOutputProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/RemoteListOutputProcessor.cs @@ -110,4 +110,4 @@ private void Reset() currentUrl = null; } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/OutputProcessors/StatusOutputProcessor.cs b/src/GitHub.Api/OutputProcessors/StatusOutputProcessor.cs index ad1e866d9..26be0f9fd 100644 --- a/src/GitHub.Api/OutputProcessors/StatusOutputProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/StatusOutputProcessor.cs @@ -89,97 +89,76 @@ public override void LineReceived(string line) } else { - // M GitHubVS.sln - //R README.md -> README2.md - // D deploy.cmd - //A something added.txt - //?? 
something.txt + var gitStatusMarker = proc.Read(2); + if (gitStatusMarker == null) + { + HandleUnexpected(line); + return; + } + + + /* + X Y Meaning + ------------------------------------------------- + [AMD] not updated + M [ MD] updated in index + A [ MD] added to index + D deleted from index + R [ MD] renamed in index + C [ MD] copied in index + [MARC] index and work tree matches + [ MARC] M work tree changed since index + [ MARC] D deleted in work tree + [ D] R renamed in work tree + [ D] C copied in work tree + ------------------------------------------------- + D D unmerged, both deleted + A A unmerged, both added + A U unmerged, added by us + D U unmerged, deleted by us + U A unmerged, added by them + U D unmerged, deleted by them + U U unmerged, both modified + ------------------------------------------------- + ? ? untracked + ! ! ignored + ------------------------------------------------- + */ string originalPath = null; string path = null; - var status = GitFileStatus.Added; - var staged = false; - if (proc.Matches('?')) - { - //?? something.txt - proc.MoveToAfter('?'); - proc.SkipWhitespace(); + var indexStatusMarker = gitStatusMarker[0]; + var workTreeStatusMarker = gitStatusMarker[1]; - path = proc.ReadToEnd().Trim('"'); - status = GitFileStatus.Untracked; - } - else if (proc.Matches('!')) + GitFileStatus indexStatus = GitStatusEntry.ParseStatusMarker(indexStatusMarker); + GitFileStatus workTreeStatus = GitStatusEntry.ParseStatusMarker(workTreeStatusMarker); + GitFileStatus status = workTreeStatus != GitFileStatus.None ? workTreeStatus : indexStatus; + + if (status == GitFileStatus.None) { - //?? 
something.txt - proc.MoveToAfter('!'); - proc.SkipWhitespace(); + HandleUnexpected(line); + return; + } - path = proc.ReadToEnd().Trim('"'); - status = GitFileStatus.Ignored; + if (status == GitFileStatus.Copied || status == GitFileStatus.Renamed) + { + var files = + proc.ReadToEnd() + .Split(new[] { "->" }, StringSplitOptions.RemoveEmptyEntries) + .Select(s => s.Trim()) + .Select(s => s.Trim('"')) + .ToArray(); + + originalPath = files[0]; + path = files[1]; } else { - if (proc.IsAtWhitespace) - { - proc.SkipWhitespace(); - } - else - { - staged = true; - } - - if (proc.Matches('M')) - { - //M GitHubVS.sln - proc.MoveNext(); - proc.SkipWhitespace(); - - path = proc.ReadToEnd().Trim('"'); - status = GitFileStatus.Modified; - } - else if (proc.Matches('D')) - { - //D deploy.cmd - proc.MoveNext(); - proc.SkipWhitespace(); - - path = proc.ReadToEnd().Trim('"'); - status = GitFileStatus.Deleted; - } - else if (proc.Matches('R')) - { - //R README.md -> README2.md - proc.MoveNext(); - proc.SkipWhitespace(); - - var files = - proc.ReadToEnd() - .Split(new[] { "->" }, StringSplitOptions.RemoveEmptyEntries) - .Select(s => s.Trim()) - .Select(s => s.Trim('"')) - .ToArray(); - - originalPath = files[0]; - path = files[1]; - status = GitFileStatus.Renamed; - } - else if (proc.Matches('A')) - { - //A something added.txt - proc.MoveNext(); - proc.SkipWhitespace(); - - path = proc.ReadToEnd().Trim('"'); - status = GitFileStatus.Added; - } - else - { - HandleUnexpected(line); - } + path = proc.ReadToEnd().Trim().Trim('"'); } - var gitStatusEntry = gitObjectFactory.CreateGitStatusEntry(path, status, originalPath, staged); + var gitStatusEntry = gitObjectFactory.CreateGitStatusEntry(path, indexStatus, workTreeStatus, originalPath); gitStatus.Entries.Add(gitStatusEntry); } } @@ -191,7 +170,7 @@ private void ReturnStatus() return; gitStatus.Entries = gitStatus.Entries - .OrderBy(entry => entry.Path) + .OrderBy(entry => entry.Path, StatusOutputPathComparer.Instance) .ToList(); 
RaiseOnEntry(gitStatus); @@ -214,5 +193,32 @@ private void HandleUnexpected(string line) { Logger.Error("Unexpected Input:\"{0}\"", line); } + + public class StatusOutputPathComparer : IComparer + { + public static StatusOutputPathComparer Instance => new StatusOutputPathComparer(); + + public int Compare(string x, string y) + { + Guard.ArgumentNotNull(x, nameof(x)); + Guard.ArgumentNotNull(y, nameof(y)); + + var meta = ".meta"; + var xHasMeta = x.EndsWith(meta); + var yHasMeta = y.EndsWith(meta); + + if(!xHasMeta && !yHasMeta) return StringComparer.InvariantCulture.Compare(x, y); + + var xPure = xHasMeta ? x.Substring(0, x.Length - meta.Length) : x; + var yPure = yHasMeta ? y.Substring(0, y.Length - meta.Length) : y; + + if (xHasMeta) + { + return xPure.Equals(y) ? 1 : StringComparer.InvariantCulture.Compare(xPure, yPure); + } + + return yPure.Equals(x) ? -1 : StringComparer.InvariantCulture.Compare(xPure, yPure); + } + } } } diff --git a/src/GitHub.Api/OutputProcessors/WindowsDiskUsageOutputProcessor.cs b/src/GitHub.Api/OutputProcessors/WindowsDiskUsageOutputProcessor.cs index 761b903f5..5de075701 100644 --- a/src/GitHub.Api/OutputProcessors/WindowsDiskUsageOutputProcessor.cs +++ b/src/GitHub.Api/OutputProcessors/WindowsDiskUsageOutputProcessor.cs @@ -1,13 +1,16 @@ using System; +using System.Text.RegularExpressions; namespace GitHub.Unity { - public class WindowsDiskUsageOutputProcessor : BaseOutputProcessor + public class WindowsDiskUsageOutputProcessor : BaseOutputProcessor { private int index = -1; private int lineCount = 0; private string[] buffer = new string[2]; - + // 199854 File(s) 25,835,841,045 bytes + private static readonly Regex totalFileCount = new Regex(@"[\s]*[\d]+[\s]+File\(s\)[\s]+(?[^\s]+)", + RegexOptions.Compiled); public override void LineReceived(string line) { lineCount++; @@ -24,14 +27,14 @@ public override void LineReceived(string line) Logger.Trace("Processing: {0}", output); - var proc = new LineParser(output); - 
proc.SkipWhitespace(); - proc.ReadUntilWhitespace(); - proc.ReadUntilWhitespace(); - proc.SkipWhitespace(); + var match = totalFileCount.Match(output); + long kilobytes = 0; + if (match.Success) + { + var bytes = long.Parse(match.Groups["bytes"].Value.Replace(",", String.Empty).Replace(".", String.Empty)); + kilobytes = bytes / 1024; + } - var bytes = int.Parse(proc.ReadUntilWhitespace().Replace(",", string.Empty)); - var kilobytes = bytes / 1024; RaiseOnEntry(kilobytes); } else @@ -40,4 +43,4 @@ public override void LineReceived(string line) } } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Platform/DefaultEnvironment.cs b/src/GitHub.Api/Platform/DefaultEnvironment.cs index 86e1ea5d0..eece16e48 100644 --- a/src/GitHub.Api/Platform/DefaultEnvironment.cs +++ b/src/GitHub.Api/Platform/DefaultEnvironment.cs @@ -1,5 +1,6 @@ using GitHub.Logging; using System; +using System.Globalization; using System.IO; using System.Linq; @@ -33,7 +34,7 @@ public DefaultEnvironment() else { localAppData = GetSpecialFolder(Environment.SpecialFolder.LocalApplicationData).ToNPath(); - commonAppData = "/usr/local/share/".ToNPath(); + commonAppData = GetSpecialFolder(Environment.SpecialFolder.ApplicationData).ToNPath(); } UserCachePath = localAppData.Combine(ApplicationInfo.ApplicationName); @@ -83,15 +84,15 @@ public void InitializeRepository(NPath? repositoryPath = null) Guard.NotNull(this, FileSystem, nameof(FileSystem)); NPath expectedRepositoryPath; - if (!RepositoryPath.IsInitialized) + if (!RepositoryPath.IsInitialized || (repositoryPath != null && RepositoryPath != repositoryPath.Value)) { Guard.NotNull(this, UnityProjectPath, nameof(UnityProjectPath)); expectedRepositoryPath = repositoryPath != null ? 
repositoryPath.Value : UnityProjectPath; - if (!expectedRepositoryPath.DirectoryExists(".git")) + if (!expectedRepositoryPath.Exists(".git")) { - NPath reporoot = UnityProjectPath.RecursiveParents.FirstOrDefault(d => d.DirectoryExists(".git")); + NPath reporoot = UnityProjectPath.RecursiveParents.FirstOrDefault(d => d.Exists(".git")); if (reporoot.IsInitialized) expectedRepositoryPath = reporoot; } @@ -102,7 +103,7 @@ public void InitializeRepository(NPath? repositoryPath = null) } FileSystem.SetCurrentDirectory(expectedRepositoryPath); - if (expectedRepositoryPath.DirectoryExists(".git")) + if (expectedRepositoryPath.Exists(".git")) { RepositoryPath = expectedRepositoryPath; Repository = new Repository(RepositoryPath, CacheContainer); @@ -116,12 +117,25 @@ public string GetSpecialFolder(Environment.SpecialFolder folder) public string ExpandEnvironmentVariables(string name) { - return Environment.ExpandEnvironmentVariables(name); + var key = GetEnvironmentVariableKey(name); + return Environment.ExpandEnvironmentVariables(key); + } + + public string GetEnvironmentVariable(string name) + { + var key = GetEnvironmentVariableKey(name); + return Environment.GetEnvironmentVariable(key); + } + + public string GetEnvironmentVariableKey(string name) + { + return GetEnvironmentVariableKeyInternal(name); } - public string GetEnvironmentVariable(string variable) + private static string GetEnvironmentVariableKeyInternal(string name) { - return Environment.GetEnvironmentVariable(variable); + return Environment.GetEnvironmentVariables().Keys.Cast() + .FirstOrDefault(k => string.Compare(name, k, true, CultureInfo.InvariantCulture) == 0) ?? 
name; } public NPath LogPath { get; } @@ -134,7 +148,7 @@ public string GetEnvironmentVariable(string variable) public NPath ExtensionInstallPath { get; set; } public NPath UserCachePath { get; set; } public NPath SystemCachePath { get; set; } - public string Path { get; set; } = Environment.GetEnvironmentVariable("PATH"); + public string Path { get; set; } = Environment.GetEnvironmentVariable(GetEnvironmentVariableKeyInternal("PATH")); public string NewLine => Environment.NewLine; public NPath OctorunScriptPath diff --git a/src/GitHub.Api/Platform/FindExecTask.cs b/src/GitHub.Api/Platform/FindExecTask.cs index dcf4e5feb..941948919 100644 --- a/src/GitHub.Api/Platform/FindExecTask.cs +++ b/src/GitHub.Api/Platform/FindExecTask.cs @@ -2,7 +2,7 @@ namespace GitHub.Unity { - class FindExecTask : ProcessTask + public class FindExecTask : ProcessTask { private readonly string arguments; diff --git a/src/GitHub.Api/Platform/IEnvironment.cs b/src/GitHub.Api/Platform/IEnvironment.cs index 24a5130fb..f21c22eab 100644 --- a/src/GitHub.Api/Platform/IEnvironment.cs +++ b/src/GitHub.Api/Platform/IEnvironment.cs @@ -41,5 +41,6 @@ public interface IEnvironment ISettings LocalSettings { get; } ISettings SystemSettings { get; } ISettings UserSettings { get; } + string GetEnvironmentVariableKey(string name); } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Platform/Platform.cs b/src/GitHub.Api/Platform/Platform.cs index 71456a5a5..34b07817e 100644 --- a/src/GitHub.Api/Platform/Platform.cs +++ b/src/GitHub.Api/Platform/Platform.cs @@ -12,7 +12,7 @@ public interface IPlatform IKeychain Keychain { get; } } - class Platform : IPlatform + public class Platform : IPlatform { public Platform(IEnvironment environment) { diff --git a/src/GitHub.Api/Platform/Settings.cs b/src/GitHub.Api/Platform/Settings.cs index 1a2e02d2e..d49fa1893 100644 --- a/src/GitHub.Api/Platform/Settings.cs +++ b/src/GitHub.Api/Platform/Settings.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { - abstract 
class BaseSettings : ISettings + public abstract class BaseSettings : ISettings { public abstract bool Exists(string key); public abstract string Get(string key, string fallback = ""); @@ -21,7 +21,7 @@ abstract class BaseSettings : ISettings protected virtual string SettingsFileName { get; set; } } - class JsonBackedSettings : BaseSettings + public class JsonBackedSettings : BaseSettings { private string cachePath; protected Dictionary cacheData; @@ -225,7 +225,7 @@ private void EnsureCachePath(string path) } } - class LocalSettings : JsonBackedSettings + public class LocalSettings : JsonBackedSettings { private const string RelativeSettingsPath = "ProjectSettings"; private const string settingsFileName = "GitHub.local.json"; @@ -238,7 +238,7 @@ public LocalSettings(IEnvironment environment) protected override string SettingsFileName { get { return settingsFileName; } } } - class UserSettings : JsonBackedSettings + public class UserSettings : JsonBackedSettings { private const string settingsFileName = "usersettings.json"; private const string oldSettingsFileName = "settings.json"; @@ -263,7 +263,7 @@ public override void Initialize() protected override string SettingsFileName { get { return settingsFileName; } } } - class SystemSettings : JsonBackedSettings + public class SystemSettings : JsonBackedSettings { private const string settingsFileName = "systemsettings.json"; private const string oldSettingsFileName = "settings.json"; diff --git a/src/GitHub.Api/Platform/WindowsDiskUsageTask.cs b/src/GitHub.Api/Platform/WindowsDiskUsageTask.cs index 75fd4ff32..d2fb89d12 100644 --- a/src/GitHub.Api/Platform/WindowsDiskUsageTask.cs +++ b/src/GitHub.Api/Platform/WindowsDiskUsageTask.cs @@ -3,7 +3,7 @@ namespace GitHub.Unity { - class WindowsDiskUsageTask : ProcessTask + class WindowsDiskUsageTask : ProcessTask { private readonly string arguments; @@ -19,4 +19,4 @@ public WindowsDiskUsageTask(NPath directory, CancellationToken token) public override TaskAffinity Affinity 
{ get { return TaskAffinity.Concurrent; } } public override string Message { get; set; } = "Getting directory size..."; } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Primitives/HostAddress.cs b/src/GitHub.Api/Primitives/HostAddress.cs index fd738b649..2382f98c1 100644 --- a/src/GitHub.Api/Primitives/HostAddress.cs +++ b/src/GitHub.Api/Primitives/HostAddress.cs @@ -75,14 +75,23 @@ public static bool IsGitHubDotCom(Uri hostUri) || hostUri.IsSameHost(gistUri); } - public static bool IsGitHubDotCom(string url) + public static bool IsGitHubDotCom(UriString hostUri) { - if (String.IsNullOrEmpty(url)) - return false; - Uri uri = null; - if (!Uri.TryCreate(url, UriKind.Absolute, out uri)) + return hostUri.Host == GitHubDotComHostAddress.WebUri.Host + || hostUri.Host == GitHubDotComHostAddress.ApiUri.Host + || hostUri.Host == gistUri.Host; + } + + public static bool IsGitHubDotCom(Connection connection) + { + if (connection == null || String.IsNullOrEmpty(connection.Host)) return false; - return IsGitHubDotCom(uri); + + var connectionHost = connection.Host.ToUriString(); + + return connectionHost.Host == GitHubDotComHostAddress.WebUri.Host + || connectionHost.Host == GitHubDotComHostAddress.ApiUri.Host + || connectionHost.Host == gistUri.Host; } public bool IsGitHubDotCom() diff --git a/src/GitHub.Api/Primitives/UriString.cs b/src/GitHub.Api/Primitives/UriString.cs index a60decfb7..17c725ca3 100644 --- a/src/GitHub.Api/Primitives/UriString.cs +++ b/src/GitHub.Api/Primitives/UriString.cs @@ -28,7 +28,8 @@ public class UriString : StringEquivalent, IEquatable public UriString(string uriString) : base(NormalizePath(uriString)) { if (uriString == null || uriString.Length == 0) return; - if (Uri.TryCreate(uriString, UriKind.Absolute, out url)) + if (Uri.TryCreate(uriString, UriKind.Absolute, out url) + || Uri.TryCreate("https://" + uriString, UriKind.Absolute, out url)) { if (!url.IsFile) SetUri(url); diff --git a/src/GitHub.Api/Platform/IProcessEnvironment.cs 
b/src/GitHub.Api/Process/IProcessEnvironment.cs similarity index 100% rename from src/GitHub.Api/Platform/IProcessEnvironment.cs rename to src/GitHub.Api/Process/IProcessEnvironment.cs diff --git a/src/GitHub.Api/OutputProcessors/IProcessManager.cs b/src/GitHub.Api/Process/IProcessManager.cs similarity index 100% rename from src/GitHub.Api/OutputProcessors/IProcessManager.cs rename to src/GitHub.Api/Process/IProcessManager.cs diff --git a/src/GitHub.Api/Platform/ProcessEnvironment.cs b/src/GitHub.Api/Process/ProcessEnvironment.cs similarity index 90% rename from src/GitHub.Api/Platform/ProcessEnvironment.cs rename to src/GitHub.Api/Process/ProcessEnvironment.cs index b1609faa3..bf97546cb 100644 --- a/src/GitHub.Api/Platform/ProcessEnvironment.cs +++ b/src/GitHub.Api/Process/ProcessEnvironment.cs @@ -1,11 +1,10 @@ using GitHub.Logging; -using System; using System.Collections.Generic; using System.Diagnostics; namespace GitHub.Unity { - class ProcessEnvironment : IProcessEnvironment + public class ProcessEnvironment : IProcessEnvironment { protected IEnvironment Environment { get; private set; } protected ILogging Logger { get; private set; } @@ -24,11 +23,12 @@ public void Configure(ProcessStartInfo psi, NPath workingDirectory, bool dontSet var path = Environment.Path; psi.EnvironmentVariables["GHU_WORKINGDIR"] = workingDirectory; + var pathEnvVarKey = Environment.GetEnvironmentVariableKey("PATH"); if (dontSetupGit) { psi.EnvironmentVariables["GHU_FULLPATH"] = path; - psi.EnvironmentVariables["PATH"] = path; + psi.EnvironmentVariables[pathEnvVarKey] = path; return; } @@ -87,10 +87,10 @@ public void Configure(ProcessStartInfo psi, NPath workingDirectory, bool dontSet pathEntries.Add("END"); - path = String.Join(separator, pathEntries.ToArray()) + separator + path; + path = string.Join(separator, pathEntries.ToArray()) + separator + path; psi.EnvironmentVariables["GHU_FULLPATH"] = path; - psi.EnvironmentVariables["PATH"] = path; + 
psi.EnvironmentVariables[pathEnvVarKey] = path; //TODO: Remove with Git LFS Locking becomes standard psi.EnvironmentVariables["GITLFSLOCKSENABLED"] = "1"; @@ -102,11 +102,11 @@ public void Configure(ProcessStartInfo psi, NPath workingDirectory, bool dontSet } var httpProxy = Environment.GetEnvironmentVariable("HTTP_PROXY"); - if (!String.IsNullOrEmpty(httpProxy)) + if (!string.IsNullOrEmpty(httpProxy)) psi.EnvironmentVariables["HTTP_PROXY"] = httpProxy; var httpsProxy = Environment.GetEnvironmentVariable("HTTPS_PROXY"); - if (!String.IsNullOrEmpty(httpsProxy)) + if (!string.IsNullOrEmpty(httpsProxy)) psi.EnvironmentVariables["HTTPS_PROXY"] = httpsProxy; psi.EnvironmentVariables["DISPLAY"] = "0"; } diff --git a/src/GitHub.Api/OutputProcessors/ProcessManager.cs b/src/GitHub.Api/Process/ProcessManager.cs similarity index 99% rename from src/GitHub.Api/OutputProcessors/ProcessManager.cs rename to src/GitHub.Api/Process/ProcessManager.cs index f67c6a6c2..3cf767a91 100644 --- a/src/GitHub.Api/OutputProcessors/ProcessManager.cs +++ b/src/GitHub.Api/Process/ProcessManager.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { - class ProcessManager : IProcessManager + public class ProcessManager : IProcessManager { private static readonly ILogging logger = LogHelper.GetLogger(); diff --git a/src/GitHub.Api/Properties/AssemblyInfo.cs b/src/GitHub.Api/Properties/AssemblyInfo.cs index 1c8fb5107..ecf892a01 100644 --- a/src/GitHub.Api/Properties/AssemblyInfo.cs +++ b/src/GitHub.Api/Properties/AssemblyInfo.cs @@ -3,6 +3,7 @@ using System.Runtime.InteropServices; [assembly: AssemblyTitle("GitHub.Api")] -[assembly: AssemblyDescription("GitHub Api")] +[assembly: AssemblyDescription("GitHub for Unity API")] [assembly: Guid("4B424108-D0E8-4BF9-9B0C-4FB49E532AB9")] [assembly: InternalsVisibleTo("GitHub.Unity")] +[assembly: InternalsVisibleTo("GitHub.Unity.45")] diff --git a/src/GitHub.Api/Resources/.gitattributes b/src/GitHub.Api/Resources/.gitattributes index 8f8c2db53..c8cb4a6c4 100644 --- 
a/src/GitHub.Api/Resources/.gitattributes +++ b/src/GitHub.Api/Resources/.gitattributes @@ -1,10 +1,28 @@ * text=auto # Unity files -*.meta -text -merge=unityamlmerge -*.unity -text -merge=unityamlmerge -*.asset -text -merge=unityamlmerge -*.prefab -text -merge=unityamlmerge +*.meta -text merge=unityyamlmerge diff +*.unity -text merge=unityyamlmerge diff +*.asset -text merge=unityyamlmerge diff +*.prefab -text merge=unityyamlmerge diff +*.mat -text merge=unityyamlmerge diff +*.anim -text merge=unityyamlmerge diff +*.controller -text merge=unityyamlmerge diff +*.overrideController -text merge=unityyamlmerge diff +*.physicMaterial -text merge=unityyamlmerge diff +*.physicsMaterial2D -text merge=unityyamlmerge diff +*.playable -text merge=unityyamlmerge diff +*.mask -text merge=unityyamlmerge diff +*.brush -text merge=unityyamlmerge diff +*.flare -text merge=unityyamlmerge diff +*.fontsettings -text merge=unityyamlmerge diff +*.guiskin -text merge=unityyamlmerge diff +*.giparams -text merge=unityyamlmerge diff +*.renderTexture -text merge=unityyamlmerge diff +*.spriteatlas -text merge=unityyamlmerge diff +*.terrainlayer -text merge=unityyamlmerge diff +*.mixer -text merge=unityyamlmerge diff +*.shadervariants -text merge=unityyamlmerge diff # Image formats *.psd filter=lfs diff=lfs merge=lfs -text @@ -14,9 +32,11 @@ *.bmp filter=lfs diff=lfs merge=lfs -text *.tga filter=lfs diff=lfs merge=lfs -text *.tiff filter=lfs diff=lfs merge=lfs -text +*.tif filter=lfs diff=lfs merge=lfs -text *.iff filter=lfs diff=lfs merge=lfs -text *.pict filter=lfs diff=lfs merge=lfs -text *.dds filter=lfs diff=lfs merge=lfs -text +*.xcf filter=lfs diff=lfs merge=lfs -text # Audio formats *.mp3 filter=lfs diff=lfs merge=lfs -text @@ -63,4 +83,4 @@ *.7z filter=lfs diff=lfs merge=lfs -text *.gz filter=lfs diff=lfs merge=lfs -text *.rar filter=lfs diff=lfs merge=lfs -text -*.tar filter=lfs diff=lfs merge=lfs -text \ No newline at end of file +*.tar filter=lfs diff=lfs merge=lfs -text diff --git 
a/src/GitHub.Api/Resources/.gitignore b/src/GitHub.Api/Resources/.gitignore index 25f674308..2538b1d2e 100644 --- a/src/GitHub.Api/Resources/.gitignore +++ b/src/GitHub.Api/Resources/.gitignore @@ -1,13 +1,18 @@ -/[Ll]ibrary/ -/[Tt]emp/ -/[Oo]bj/ -/[Bb]uild/ -/[Bb]uilds/ +[Ll]ibrary/ +[Tt]emp/ +[Oo]bj/ +[Bb]uild/ +[Bb]uilds/ +[Ll]ogs/ -/Assets/AssetStoreTools* +# Uncomment this line if you wish to ignore the asset store tools plugin +# [Aa]ssets/AssetStoreTools* -# Visual Studio 2015 cache directory -/.vs/ +# Visual Studio cache directory +.vs/ + +# Gradle cache directory +.gradle/ # Autogenerated VS/MD/Consulo solution and project files ExportedObj/ @@ -23,14 +28,21 @@ ExportedObj/ *.booproj *.svd *.pdb - +*.mdb +*.opendb +*.VC.db # Unity3D generated meta files *.pidb.meta +*.pdb.meta +*.mdb.meta -# Unity3D Generated File On Crash Reports +# Unity3D generated file on crash reports sysinfo.txt # Builds *.apk -*.unitypackage \ No newline at end of file +*.unitypackage + +# Crashlytics generated file +crashlytics-build.properties diff --git a/src/GitHub.Api/Resources/octorun.zip b/src/GitHub.Api/Resources/octorun.zip index 8a661c7ca..7b1374c9d 100644 --- a/src/GitHub.Api/Resources/octorun.zip +++ b/src/GitHub.Api/Resources/octorun.zip @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:802c9a15337ce6692f8c4c215a131b755358972cb3a7e76139193855770ab1c8 -size 214371 +oid sha256:95fe1967a6d00af4abb3d34b897769451f78bdf87ad132ba2b526264ac36e14a +size 214195 diff --git a/src/GitHub.Api/Resources/octorun.zip.md5 b/src/GitHub.Api/Resources/octorun.zip.md5 index d2a4cc5cd..70d55debe 100644 --- a/src/GitHub.Api/Resources/octorun.zip.md5 +++ b/src/GitHub.Api/Resources/octorun.zip.md5 @@ -1 +1 @@ -0a49f36d2e8df01456f832c6968a6782 +3ad23df7f5076a6fbd7d3ce03ad919cc diff --git a/src/GitHub.Api/SharpZipLib/Checksums/Adler32.cs b/src/GitHub.Api/SharpZipLib/Checksums/Adler32.cs new file mode 100644 index 000000000..b0fc04a43 --- /dev/null +++ 
b/src/GitHub.Api/SharpZipLib/Checksums/Adler32.cs @@ -0,0 +1,237 @@ +// Adler32.cs - Computes Adler32 data checksum of a data stream +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. 
If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Checksums +{ + + /// + /// Computes Adler32 checksum for a stream of data. An Adler32 + /// checksum is not as reliable as a CRC32 checksum, but a lot faster to + /// compute. + /// + /// The specification for Adler32 may be found in RFC 1950. + /// ZLIB Compressed Data Format Specification version 3.3) + /// + /// + /// From that document: + /// + /// "ADLER32 (Adler-32 checksum) + /// This contains a checksum value of the uncompressed data + /// (excluding any dictionary data) computed according to Adler-32 + /// algorithm. This algorithm is a 32-bit extension and improvement + /// of the Fletcher algorithm, used in the ITU-T X.224 / ISO 8073 + /// standard. + /// + /// Adler-32 is composed of two sums accumulated per byte: s1 is + /// the sum of all bytes, s2 is the sum of all s1 values. Both sums + /// are done modulo 65521. s1 is initialized to 1, s2 to zero. The + /// Adler-32 checksum is stored as s2*65536 + s1 in most- + /// significant-byte first (network) order." + /// + /// "8.2. The Adler-32 algorithm + /// + /// The Adler-32 algorithm is much faster than the CRC32 algorithm yet + /// still provides an extremely low probability of undetected errors. + /// + /// The modulo on unsigned long accumulators can be delayed for 5552 + /// bytes, so the modulo operation time is negligible. If the bytes + /// are a, b, c, the second sum is 3a + 2b + c + 3, and so is position + /// and order sensitive, unlike the first sum, which is just a + /// checksum. That 65521 is prime is important to avoid a possible + /// large class of two-byte errors that leave the check unchanged. + /// (The Fletcher checksum uses 255, which is not prime and which also + /// makes the Fletcher check insensitive to single byte changes 0 - + /// 255.) 
+ /// + /// The sum s1 is initialized to 1 instead of zero to make the length + /// of the sequence part of s2, so that the length does not have to be + /// checked separately. (Any sequence of zeroes has a Fletcher + /// checksum of zero.)" + /// + /// + /// + public sealed class Adler32 : IChecksum + { + /// + /// largest prime smaller than 65536 + /// + const uint BASE = 65521; + + /// + /// Returns the Adler32 data checksum computed so far. + /// + public long Value { + get { + return checksum; + } + } + + /// + /// Creates a new instance of the Adler32 class. + /// The checksum starts off with a value of 1. + /// + public Adler32() + { + Reset(); + } + + /// + /// Resets the Adler32 checksum to the initial value. + /// + public void Reset() + { + checksum = 1; + } + + /// + /// Updates the checksum with a byte value. + /// + /// + /// The data value to add. The high byte of the int is ignored. + /// + public void Update(int value) + { + // We could make a length 1 byte array and call update again, but I + // would rather not have that overhead + uint s1 = checksum & 0xFFFF; + uint s2 = checksum >> 16; + + s1 = (s1 + ((uint)value & 0xFF)) % BASE; + s2 = (s1 + s2) % BASE; + + checksum = (s2 << 16) + s1; + } + + /// + /// Updates the checksum with an array of bytes. + /// + /// + /// The source of the data to update with. + /// + public void Update(byte[] buffer) + { + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + Update(buffer, 0, buffer.Length); + } + + /// + /// Updates the checksum with the bytes taken from the array. 
+ /// + /// + /// an array of bytes + /// + /// + /// the start of the data used for this update + /// + /// + /// the number of bytes to use for this update + /// + public void Update(byte[] buffer, int offset, int count) + { + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + if (offset < 0) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "cannot be negative"); +#endif + } + + if ( count < 0 ) + { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "cannot be negative"); +#endif + } + + if (offset >= buffer.Length) + { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "not a valid index into buffer"); +#endif + } + + if (offset + count > buffer.Length) + { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "exceeds buffer size"); +#endif + } + + //(By Per Bothner) + uint s1 = checksum & 0xFFFF; + uint s2 = checksum >> 16; + + while (count > 0) { + // We can defer the modulo operation: + // s1 maximally grows from 65521 to 65521 + 255 * 3800 + // s2 maximally grows by 3800 * median(s1) = 2090079800 < 2^31 + int n = 3800; + if (n > count) { + n = count; + } + count -= n; + while (--n >= 0) { + s1 = s1 + (uint)(buffer[offset++] & 0xff); + s2 = s2 + s1; + } + s1 %= BASE; + s2 %= BASE; + } + + checksum = (s2 << 16) | s1; + } + + #region Instance Fields + uint checksum; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Checksums/CRC32.cs b/src/GitHub.Api/SharpZipLib/Checksums/CRC32.cs new file mode 100644 index 000000000..086594dbe --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Checksums/CRC32.cs @@ -0,0 +1,223 @@ +// CRC32.cs - Computes CRC32 data checksum of a data stream +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from 
java, it was part of the GNU Classpath +// Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. 
+ +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Checksums +{ + + /// + /// Generate a table for a byte-wise 32-bit CRC calculation on the polynomial: + /// x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x+1. + /// + /// Polynomials over GF(2) are represented in binary, one bit per coefficient, + /// with the lowest powers in the most significant bit. Then adding polynomials + /// is just exclusive-or, and multiplying a polynomial by x is a right shift by + /// one. If we call the above polynomial p, and represent a byte as the + /// polynomial q, also with the lowest power in the most significant bit (so the + /// byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p, + /// where a mod b means the remainder after dividing a by b. + /// + /// This calculation is done using the shift-register method of multiplying and + /// taking the remainder. The register is initialized to zero, and for each + /// incoming bit, x^32 is added mod p to the register if the bit is a one (where + /// x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by + /// x (which is shifting right by one and adding x^32 mod p if the bit shifted + /// out is a one). We start with the highest power (least significant bit) of + /// q and repeat for all eight bits of q. + /// + /// The table is simply the CRC of all possible eight bit values. This is all + /// the information needed to generate CRC's on data a byte at a time for all + /// combinations of CRC register values and incoming bytes. 
+ /// + public sealed class Crc32 : IChecksum + { + const uint CrcSeed = 0xFFFFFFFF; + + readonly static uint[] CrcTable = new uint[] { + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, + 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, + 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, + 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, + 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, + 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, + 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, + 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, + 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, + 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, + 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, + 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, + 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, + 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, + 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, + 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, + 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, + 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, + 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, + 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, + 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, + 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, + 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, + 
0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, + 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, + 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, + 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, + 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, + 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, + 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, + 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, + 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, + 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, + 0x18B74777, 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, + 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, + 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, + 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, + 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, + 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, + 0x2D02EF8D + }; + + internal static uint ComputeCrc32(uint oldCrc, byte value) + { + return (uint)(Crc32.CrcTable[(oldCrc ^ value) & 0xFF] ^ (oldCrc >> 8)); + } + + /// + /// The crc data checksum so far. + /// + uint crc; + + /// + /// Returns the CRC32 data checksum computed so far. + /// + public long Value { + get { + return (long)crc; + } + set { + crc = (uint)value; + } + } + + /// + /// Resets the CRC32 data checksum as if no update was ever called. + /// + public void Reset() + { + crc = 0; + } + + /// + /// Updates the checksum with the int bval. 
+ /// + /// + /// the byte is taken as the lower 8 bits of value + /// + public void Update(int value) + { + crc ^= CrcSeed; + crc = CrcTable[(crc ^ value) & 0xFF] ^ (crc >> 8); + crc ^= CrcSeed; + } + + /// + /// Updates the checksum with the bytes taken from the array. + /// + /// + /// buffer an array of bytes + /// + public void Update(byte[] buffer) + { + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + Update(buffer, 0, buffer.Length); + } + + /// + /// Adds the byte array to the data checksum. + /// + /// + /// The buffer which contains the data + /// + /// + /// The offset in the buffer where the data starts + /// + /// + /// The number of data bytes to update the CRC with. + /// + public void Update(byte[] buffer, int offset, int count) + { + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + if ( count < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); +#endif + } + + if (offset < 0 || offset + count > buffer.Length) { + throw new ArgumentOutOfRangeException("offset"); + } + + crc ^= CrcSeed; + + while (--count >= 0) { + crc = CrcTable[(crc ^ buffer[offset++]) & 0xFF] ^ (crc >> 8); + } + + crc ^= CrcSeed; + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Checksums/IChecksum.cs b/src/GitHub.Api/SharpZipLib/Checksums/IChecksum.cs new file mode 100644 index 000000000..6ff24e024 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Checksums/IChecksum.cs @@ -0,0 +1,93 @@ +// IChecksum.cs - Interface to compute a data checksum +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc. 
+// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +namespace GitHub.ICSharpCode.SharpZipLib.Checksums +{ + + /// + /// Interface to compute a data checksum used by checked input/output streams. + /// A data checksum can be updated by one byte or with a byte array. 
After each + /// update the value of the current checksum can be returned by calling + /// getValue. The complete checksum object can also be reset + /// so it can be used again with new data. + /// + public interface IChecksum + { + /// + /// Returns the data checksum computed so far. + /// + long Value + { + get; + } + + /// + /// Resets the data checksum as if no update was ever called. + /// + void Reset(); + + /// + /// Adds one byte to the data checksum. + /// + /// + /// the data value to add. The high byte of the int is ignored. + /// + void Update(int value); + + /// + /// Updates the data checksum with the bytes taken from the array. + /// + /// + /// buffer an array of bytes + /// + void Update(byte[] buffer); + + /// + /// Adds the byte array to the data checksum. + /// + /// + /// The buffer which contains the data + /// + /// + /// The offset in the buffer where the data starts + /// + /// + /// the number of data bytes to add. + /// + void Update(byte[] buffer, int offset, int count); + } +} diff --git a/src/GitHub.Api/SharpZipLib/Checksums/StrangeCRC.cs b/src/GitHub.Api/SharpZipLib/Checksums/StrangeCRC.cs new file mode 100644 index 000000000..6d61a6723 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Checksums/StrangeCRC.cs @@ -0,0 +1,208 @@ +// StrangeCRC.cs - computes a crc used in the bziplib +// +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. 
+ +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Checksums +{ + /// + /// Bzip2 checksum algorithm + /// + public class StrangeCRC : IChecksum + { + readonly static uint[] crc32Table = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, 
+ 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + }; + + int globalCrc; + + /// + /// Initialise a default instance of + /// + public StrangeCRC() + { + Reset(); + } + + /// + /// Reset the state of Crc. + /// + public void Reset() + { + globalCrc = -1; + } + + /// + /// Get the current Crc value. + /// + public long Value { + get { + return ~globalCrc; + } + } + + /// + /// Update the Crc value. 
+ /// + /// data update is based on + public void Update(int value) + { + int temp = (globalCrc >> 24) ^ value; + if (temp < 0) { + temp = 256 + temp; + } + globalCrc = unchecked((int)((globalCrc << 8) ^ crc32Table[temp])); + } + + /// + /// Update Crc based on a block of data + /// + /// The buffer containing data to update the crc with. + public void Update(byte[] buffer) + { + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + Update(buffer, 0, buffer.Length); + } + + /// + /// Update Crc based on a portion of a block of data + /// + /// block of data + /// index of first byte to use + /// number of bytes to use + public void Update(byte[] buffer, int offset, int count) + { + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + if ( offset < 0 ) + { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "cannot be less than zero"); +#endif + } + + if ( count < 0 ) + { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "cannot be less than zero"); +#endif + } + + if ( offset + count > buffer.Length ) + { + throw new ArgumentOutOfRangeException("count"); + } + + for (int i = 0; i < count; ++i) { + Update(buffer[offset++]); + } + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/FileSystemScanner.cs b/src/GitHub.Api/SharpZipLib/Core/FileSystemScanner.cs new file mode 100644 index 000000000..52c8bd2e1 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/FileSystemScanner.cs @@ -0,0 +1,533 @@ +// FileSystemScanner.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + #region EventArgs + /// + /// Event arguments for scanning. + /// + public class ScanEventArgs : EventArgs + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The file or directory name. + public ScanEventArgs(string name) + { + name_ = name; + } + #endregion + + /// + /// The file or directory name for this event. 
+ /// + public string Name + { + get { return name_; } + } + + /// + /// Get set a value indicating if scanning should continue or not. + /// + public bool ContinueRunning + { + get { return continueRunning_; } + set { continueRunning_ = value; } + } + + #region Instance Fields + string name_; + bool continueRunning_ = true; + #endregion + } + + /// + /// Event arguments during processing of a single file or directory. + /// + public class ProgressEventArgs : EventArgs + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The file or directory name if known. + /// The number of bytes processed so far + /// The total number of bytes to process, 0 if not known + public ProgressEventArgs(string name, long processed, long target) + { + name_ = name; + processed_ = processed; + target_ = target; + } + #endregion + + /// + /// The name for this event if known. + /// + public string Name + { + get { return name_; } + } + + /// + /// Get set a value indicating wether scanning should continue or not. + /// + public bool ContinueRunning + { + get { return continueRunning_; } + set { continueRunning_ = value; } + } + + /// + /// Get a percentage representing how much of the has been processed + /// + /// 0.0 to 100.0 percent; 0 if target is not known. + public float PercentComplete + { + get + { + float result; + if (target_ <= 0) + { + result = 0; + } + else + { + result = ((float)processed_ / (float)target_) * 100.0f; + } + return result; + } + } + + /// + /// The number of bytes processed so far + /// + public long Processed + { + get { return processed_; } + } + + /// + /// The number of bytes to process. + /// + /// Target may be 0 or negative if the value isnt known. + public long Target + { + get { return target_; } + } + + #region Instance Fields + string name_; + long processed_; + long target_; + bool continueRunning_ = true; + #endregion + } + + /// + /// Event arguments for directories. 
+ /// + public class DirectoryEventArgs : ScanEventArgs + { + #region Constructors + /// + /// Initialize an instance of . + /// + /// The name for this directory. + /// Flag value indicating if any matching files are contained in this directory. + public DirectoryEventArgs(string name, bool hasMatchingFiles) + : base (name) + { + hasMatchingFiles_ = hasMatchingFiles; + } + #endregion + + /// + /// Get a value indicating if the directory contains any matching files or not. + /// + public bool HasMatchingFiles + { + get { return hasMatchingFiles_; } + } + + #region Instance Fields + bool hasMatchingFiles_; + #endregion + } + + /// + /// Arguments passed when scan failures are detected. + /// + public class ScanFailureEventArgs : EventArgs + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The name to apply. + /// The exception to use. + public ScanFailureEventArgs(string name, Exception e) + { + name_ = name; + exception_ = e; + continueRunning_ = true; + } + #endregion + + /// + /// The applicable name. + /// + public string Name + { + get { return name_; } + } + + /// + /// The applicable exception. + /// + public Exception Exception + { + get { return exception_; } + } + + /// + /// Get / set a value indicating wether scanning should continue. + /// + public bool ContinueRunning + { + get { return continueRunning_; } + set { continueRunning_ = value; } + } + + #region Instance Fields + string name_; + Exception exception_; + bool continueRunning_; + #endregion + } + + #endregion + + #region Delegates + /// + /// Delegate invoked before starting to process a directory. + /// + public delegate void ProcessDirectoryHandler(object sender, DirectoryEventArgs e); + + /// + /// Delegate invoked before starting to process a file. + /// + /// The source of the event + /// The event arguments. 
+ public delegate void ProcessFileHandler(object sender, ScanEventArgs e); + + /// + /// Delegate invoked during processing of a file or directory + /// + /// The source of the event + /// The event arguments. + public delegate void ProgressHandler(object sender, ProgressEventArgs e); + + /// + /// Delegate invoked when a file has been completely processed. + /// + /// The source of the event + /// The event arguments. + public delegate void CompletedFileHandler(object sender, ScanEventArgs e); + + /// + /// Delegate invoked when a directory failure is detected. + /// + /// The source of the event + /// The event arguments. + public delegate void DirectoryFailureHandler(object sender, ScanFailureEventArgs e); + + /// + /// Delegate invoked when a file failure is detected. + /// + /// The source of the event + /// The event arguments. + public delegate void FileFailureHandler(object sender, ScanFailureEventArgs e); + #endregion + + /// + /// FileSystemScanner provides facilities scanning of files and directories. + /// + public class FileSystemScanner + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The file filter to apply when scanning. + public FileSystemScanner(string filter) + { + fileFilter_ = new PathFilter(filter); + } + + /// + /// Initialise a new instance of + /// + /// The file filter to apply. + /// The directory filter to apply. + public FileSystemScanner(string fileFilter, string directoryFilter) + { + fileFilter_ = new PathFilter(fileFilter); + directoryFilter_ = new PathFilter(directoryFilter); + } + + /// + /// Initialise a new instance of + /// + /// The file filter to apply. + public FileSystemScanner(IScanFilter fileFilter) + { + fileFilter_ = fileFilter; + } + + /// + /// Initialise a new instance of + /// + /// The file filter to apply. + /// The directory filter to apply. 
+ public FileSystemScanner(IScanFilter fileFilter, IScanFilter directoryFilter) + { + fileFilter_ = fileFilter; + directoryFilter_ = directoryFilter; + } + #endregion + + #region Delegates + /// + /// Delegate to invoke when a directory is processed. + /// + public ProcessDirectoryHandler ProcessDirectory; + + /// + /// Delegate to invoke when a file is processed. + /// + public ProcessFileHandler ProcessFile; + + /// + /// Delegate to invoke when processing for a file has finished. + /// + public CompletedFileHandler CompletedFile; + + /// + /// Delegate to invoke when a directory failure is detected. + /// + public DirectoryFailureHandler DirectoryFailure; + + /// + /// Delegate to invoke when a file failure is detected. + /// + public FileFailureHandler FileFailure; + #endregion + + /// + /// Raise the DirectoryFailure event. + /// + /// The directory name. + /// The exception detected. + bool OnDirectoryFailure(string directory, Exception e) + { + DirectoryFailureHandler handler = DirectoryFailure; + bool result = (handler != null); + if ( result ) { + ScanFailureEventArgs args = new ScanFailureEventArgs(directory, e); + handler(this, args); + alive_ = args.ContinueRunning; + } + return result; + } + + /// + /// Raise the FileFailure event. + /// + /// The file name. + /// The exception detected. + bool OnFileFailure(string file, Exception e) + { + FileFailureHandler handler = FileFailure; + + bool result = (handler != null); + + if ( result ){ + ScanFailureEventArgs args = new ScanFailureEventArgs(file, e); + FileFailure(this, args); + alive_ = args.ContinueRunning; + } + return result; + } + + /// + /// Raise the ProcessFile event. + /// + /// The file name. 
+ void OnProcessFile(string file) + { + ProcessFileHandler handler = ProcessFile; + + if ( handler!= null ) { + ScanEventArgs args = new ScanEventArgs(file); + handler(this, args); + alive_ = args.ContinueRunning; + } + } + + /// + /// Raise the complete file event + /// + /// The file name + void OnCompleteFile(string file) + { + CompletedFileHandler handler = CompletedFile; + + if (handler != null) + { + ScanEventArgs args = new ScanEventArgs(file); + handler(this, args); + alive_ = args.ContinueRunning; + } + } + + /// + /// Raise the ProcessDirectory event. + /// + /// The directory name. + /// Flag indicating if the directory has matching files. + void OnProcessDirectory(string directory, bool hasMatchingFiles) + { + ProcessDirectoryHandler handler = ProcessDirectory; + + if ( handler != null ) { + DirectoryEventArgs args = new DirectoryEventArgs(directory, hasMatchingFiles); + handler(this, args); + alive_ = args.ContinueRunning; + } + } + + /// + /// Scan a directory. + /// + /// The base directory to scan. + /// True to recurse subdirectories, false to scan a single directory. 
+ public void Scan(string directory, bool recurse) + { + alive_ = true; + ScanDir(directory, recurse); + } + + void ScanDir(string directory, bool recurse) + { + + try { + string[] names = System.IO.Directory.GetFiles(directory); + bool hasMatch = false; + for (int fileIndex = 0; fileIndex < names.Length; ++fileIndex) { + if ( !fileFilter_.IsMatch(names[fileIndex]) ) { + names[fileIndex] = null; + } else { + hasMatch = true; + } + } + + OnProcessDirectory(directory, hasMatch); + + if ( alive_ && hasMatch ) { + foreach (string fileName in names) { + try { + if ( fileName != null ) { + OnProcessFile(fileName); + if ( !alive_ ) { + break; + } + } + } + catch (Exception e) { + if (!OnFileFailure(fileName, e)) { + throw; + } + } + } + } + } + catch (Exception e) { + if (!OnDirectoryFailure(directory, e)) { + throw; + } + } + + if ( alive_ && recurse ) { + try { + string[] names = System.IO.Directory.GetDirectories(directory); + foreach (string fulldir in names) { + if ((directoryFilter_ == null) || (directoryFilter_.IsMatch(fulldir))) { + ScanDir(fulldir, true); + if ( !alive_ ) { + break; + } + } + } + } + catch (Exception e) { + if (!OnDirectoryFailure(directory, e)) { + throw; + } + } + } + } + + #region Instance Fields + /// + /// The file filter currently in use. + /// + IScanFilter fileFilter_; + /// + /// The directory filter currently in use. + /// + IScanFilter directoryFilter_; + /// + /// Flag indicating if scanning should continue running. 
+ /// + bool alive_; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/INameTransform.cs b/src/GitHub.Api/SharpZipLib/Core/INameTransform.cs new file mode 100644 index 000000000..7e5b025ae --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/INameTransform.cs @@ -0,0 +1,57 @@ +// INameTransform.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. 
If you do not wish to do so, delete this +// exception statement from your version. + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// INameTransform defines how file system names are transformed for use with archives, or vice versa. + /// + public interface INameTransform + { + /// + /// Given a file name determine the transformed value. + /// + /// The name to transform. + /// The transformed file name. + string TransformFile(string name); + + /// + /// Given a directory name determine the transformed value. + /// + /// The name to transform. + /// The transformed directory name + string TransformDirectory(string name); + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/IScanFilter.cs b/src/GitHub.Api/SharpZipLib/Core/IScanFilter.cs new file mode 100644 index 000000000..1a29bca1f --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/IScanFilter.cs @@ -0,0 +1,50 @@ +// IScanFilter.cs +// +// Copyright 2006 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// Scanning filters support filtering of names. + /// + public interface IScanFilter + { + /// + /// Test a name to see if it 'matches' the filter. + /// + /// The name to test. + /// Returns true if the name matches the filter, false if it does not match. + bool IsMatch(string name); + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/NameFilter.cs b/src/GitHub.Api/SharpZipLib/Core/NameFilter.cs new file mode 100644 index 000000000..83fee778d --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/NameFilter.cs @@ -0,0 +1,290 @@ +// NameFilter.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 2010-03-03 Z-1654 Fixed bug where escape characters were excluded in SplitQuoted() + +using System; +using System.Collections; +using System.Text; +using System.Text.RegularExpressions; + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// NameFilter is a string matching class which allows for both positive and negative + /// matching. + /// A filter is a sequence of independant regular expressions separated by semi-colons ';'. + /// To include a semi-colon it may be quoted as in \;. Each expression can be prefixed by a plus '+' sign or + /// a minus '-' sign to denote the expression is intended to include or exclude names. + /// If neither a plus or minus sign is found include is the default. 
+ /// A given name is tested for inclusion before checking exclusions. Only names matching an include spec + /// and not matching an exclude spec are deemed to match the filter. + /// An empty filter matches any name. + /// + /// The following expression includes all name ending in '.dat' with the exception of 'dummy.dat' + /// "+\.dat$;-^dummy\.dat$" + /// + public class NameFilter : IScanFilter + { + #region Constructors + /// + /// Construct an instance based on the filter expression passed + /// + /// The filter expression. + public NameFilter(string filter) + { + filter_ = filter; + inclusions_ = new ArrayList(); + exclusions_ = new ArrayList(); + Compile(); + } + #endregion + + /// + /// Test a string to see if it is a valid regular expression. + /// + /// The expression to test. + /// True if expression is a valid false otherwise. + public static bool IsValidExpression(string expression) + { + bool result = true; + try { + Regex exp = new Regex(expression, RegexOptions.IgnoreCase | RegexOptions.Singleline); + } + catch (ArgumentException) { + result = false; + } + return result; + } + + /// + /// Test an expression to see if it is valid as a filter. + /// + /// The filter expression to test. + /// True if the expression is valid, false otherwise. 
+ public static bool IsValidFilterExpression(string toTest) + { + if ( toTest == null ) { + throw new ArgumentNullException("toTest"); + } + + bool result = true; + + try { + string[] items = SplitQuoted(toTest); + for (int i = 0; i < items.Length; ++i) { + if ((items[i] != null) && (items[i].Length > 0)) { + string toCompile; + + if (items[i][0] == '+') { + toCompile = items[i].Substring(1, items[i].Length - 1); + } + else if (items[i][0] == '-') { + toCompile = items[i].Substring(1, items[i].Length - 1); + } + else { + toCompile = items[i]; + } + + Regex testRegex = new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Singleline); + } + } + } + catch (ArgumentException) { + result = false; + } + + return result; + } + + /// + /// Split a string into its component pieces + /// + /// The original string + /// Returns an array of values containing the individual filter elements. + public static string[] SplitQuoted(string original) + { + char escape = '\\'; + char[] separators = { ';' }; + + ArrayList result = new ArrayList(); + + if ((original != null) && (original.Length > 0)) { + int endIndex = -1; + StringBuilder b = new StringBuilder(); + + while (endIndex < original.Length) { + endIndex += 1; + if (endIndex >= original.Length) { + result.Add(b.ToString()); + } + else if (original[endIndex] == escape) { + endIndex += 1; + if (endIndex >= original.Length) { +#if NETCF_1_0 + throw new ArgumentException("Missing terminating escape character"); +#else + throw new ArgumentException("Missing terminating escape character", "original"); +#endif + } + // include escape if this is not an escaped separator + if (Array.IndexOf(separators, original[endIndex]) < 0) + b.Append(escape); + + b.Append(original[endIndex]); + } + else { + if (Array.IndexOf(separators, original[endIndex]) >= 0) { + result.Add(b.ToString()); + b.Length = 0; + } + else { + b.Append(original[endIndex]); + } + } + } + } + + return (string[])result.ToArray(typeof(string)); + } + + /// + /// 
Convert this filter to its string equivalent. + /// + /// The string equivalent for this filter. + public override string ToString() + { + return filter_; + } + + /// + /// Test a value to see if it is included by the filter. + /// + /// The value to test. + /// True if the value is included, false otherwise. + public bool IsIncluded(string name) + { + bool result = false; + if ( inclusions_.Count == 0 ) { + result = true; + } + else { + foreach ( Regex r in inclusions_ ) { + if ( r.IsMatch(name) ) { + result = true; + break; + } + } + } + return result; + } + + /// + /// Test a value to see if it is excluded by the filter. + /// + /// The value to test. + /// True if the value is excluded, false otherwise. + public bool IsExcluded(string name) + { + bool result = false; + foreach ( Regex r in exclusions_ ) { + if ( r.IsMatch(name) ) { + result = true; + break; + } + } + return result; + } + + #region IScanFilter Members + /// + /// Test a value to see if it matches the filter. + /// + /// The value to test. + /// True if the value matches, false otherwise. + public bool IsMatch(string name) + { + return (IsIncluded(name) && !IsExcluded(name)); + } + #endregion + + /// + /// Compile this filter. + /// + void Compile() + { + // TODO: Check to see if combining RE's makes it faster/smaller. + // simple scheme would be to have one RE for inclusion and one for exclusion. 
+ if ( filter_ == null ) { + return; + } + + string[] items = SplitQuoted(filter_); + for ( int i = 0; i < items.Length; ++i ) { + if ( (items[i] != null) && (items[i].Length > 0) ) { + bool include = (items[i][0] != '-'); + string toCompile; + + if ( items[i][0] == '+' ) { + toCompile = items[i].Substring(1, items[i].Length - 1); + } + else if ( items[i][0] == '-' ) { + toCompile = items[i].Substring(1, items[i].Length - 1); + } + else { + toCompile = items[i]; + } + + // NOTE: Regular expressions can fail to compile here for a number of reasons that cause an exception + // these are left unhandled here as the caller is responsible for ensuring all is valid. + // several functions IsValidFilterExpression and IsValidExpression are provided for such checking + if ( include ) { + inclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline)); + } + else { + exclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline)); + } + } + } + } + + #region Instance Fields + string filter_; + ArrayList inclusions_; + ArrayList exclusions_; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/PathFilter.cs b/src/GitHub.Api/SharpZipLib/Core/PathFilter.cs new file mode 100644 index 000000000..662bce785 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/PathFilter.cs @@ -0,0 +1,334 @@ +// PathFilter.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// PathFilter filters directories and files using a form of regular expressions + /// by full path name. + /// See NameFilter for more detail on filtering. + /// + public class PathFilter : IScanFilter + { + #region Constructors + /// + /// Initialise a new instance of . + /// + /// The filter expression to apply. + public PathFilter(string filter) + { + nameFilter_ = new NameFilter(filter); + } + #endregion + + #region IScanFilter Members + /// + /// Test a name to see if it matches the filter. + /// + /// The name to test. + /// True if the name matches, false otherwise. + /// is used to get the full path before matching. 
+ public virtual bool IsMatch(string name) + { + bool result = false; + + if ( name != null ) { + string cooked = (name.Length > 0) ? Path.GetFullPath(name) : ""; + result = nameFilter_.IsMatch(cooked); + } + return result; + } + #endregion + + #region Instance Fields + NameFilter nameFilter_; + #endregion + } + + /// + /// ExtendedPathFilter filters based on name, file size, and the last write time of the file. + /// + /// Provides an example of how to customise filtering. + public class ExtendedPathFilter : PathFilter + { + #region Constructors + /// + /// Initialise a new instance of ExtendedPathFilter. + /// + /// The filter to apply. + /// The minimum file size to include. + /// The maximum file size to include. + public ExtendedPathFilter(string filter, + long minSize, long maxSize) + : base(filter) + { + MinSize = minSize; + MaxSize = maxSize; + } + + /// + /// Initialise a new instance of ExtendedPathFilter. + /// + /// The filter to apply. + /// The minimum to include. + /// The maximum to include. + public ExtendedPathFilter(string filter, + DateTime minDate, DateTime maxDate) + : base(filter) + { + MinDate = minDate; + MaxDate = maxDate; + } + + /// + /// Initialise a new instance of ExtendedPathFilter. + /// + /// The filter to apply. + /// The minimum file size to include. + /// The maximum file size to include. + /// The minimum to include. + /// The maximum to include. + public ExtendedPathFilter(string filter, + long minSize, long maxSize, + DateTime minDate, DateTime maxDate) + : base(filter) + { + MinSize = minSize; + MaxSize = maxSize; + MinDate = minDate; + MaxDate = maxDate; + } + #endregion + + #region IScanFilter Members + /// + /// Test a filename to see if it matches the filter. + /// + /// The filename to test. + /// True if the filter matches, false otherwise. 
+ /// The doesnt exist + public override bool IsMatch(string name) + { + bool result = base.IsMatch(name); + + if ( result ) { + FileInfo fileInfo = new FileInfo(name); + result = + (MinSize <= fileInfo.Length) && + (MaxSize >= fileInfo.Length) && + (MinDate <= fileInfo.LastWriteTime) && + (MaxDate >= fileInfo.LastWriteTime) + ; + } + return result; + } + #endregion + + #region Properties + /// + /// Get/set the minimum size/length for a file that will match this filter. + /// + /// The default value is zero. + /// value is less than zero; greater than + public long MinSize + { + get { return minSize_; } + set + { + if ( (value < 0) || (maxSize_ < value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + minSize_ = value; + } + } + + /// + /// Get/set the maximum size/length for a file that will match this filter. + /// + /// The default value is + /// value is less than zero or less than + public long MaxSize + { + get { return maxSize_; } + set + { + if ( (value < 0) || (minSize_ > value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + maxSize_ = value; + } + } + + /// + /// Get/set the minimum value that will match for this filter. + /// + /// Files with a LastWrite time less than this value are excluded by the filter. + public DateTime MinDate + { + get + { + return minDate_; + } + + set + { + if ( value > maxDate_ ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("value"); +#else + throw new ArgumentOutOfRangeException("value", "Exceeds MaxDate"); +#endif + } + + minDate_ = value; + } + } + + /// + /// Get/set the maximum value that will match for this filter. + /// + /// Files with a LastWrite time greater than this value are excluded by the filter. 
+ public DateTime MaxDate + { + get + { + return maxDate_; + } + + set + { + if ( minDate_ > value ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("value"); +#else + throw new ArgumentOutOfRangeException("value", "Exceeds MinDate"); +#endif + } + + maxDate_ = value; + } + } + #endregion + + #region Instance Fields + long minSize_; + long maxSize_ = long.MaxValue; + DateTime minDate_ = DateTime.MinValue; + DateTime maxDate_ = DateTime.MaxValue; + #endregion + } + + /// + /// NameAndSizeFilter filters based on name and file size. + /// + /// A sample showing how filters might be extended. + [Obsolete("Use ExtendedPathFilter instead")] + public class NameAndSizeFilter : PathFilter + { + + /// + /// Initialise a new instance of NameAndSizeFilter. + /// + /// The filter to apply. + /// The minimum file size to include. + /// The maximum file size to include. + public NameAndSizeFilter(string filter, long minSize, long maxSize) + : base(filter) + { + MinSize = minSize; + MaxSize = maxSize; + } + + /// + /// Test a filename to see if it matches the filter. + /// + /// The filename to test. + /// True if the filter matches, false otherwise. + public override bool IsMatch(string name) + { + bool result = base.IsMatch(name); + + if ( result ) { + FileInfo fileInfo = new FileInfo(name); + long length = fileInfo.Length; + result = + (MinSize <= length) && + (MaxSize >= length); + } + return result; + } + + /// + /// Get/set the minimum size for a file that will match this filter. + /// + public long MinSize + { + get { return minSize_; } + set { + if ( (value < 0) || (maxSize_ < value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + minSize_ = value; + } + } + + /// + /// Get/set the maximum size for a file that will match this filter. 
+ /// + public long MaxSize + { + get { return maxSize_; } + set + { + if ( (value < 0) || (minSize_ > value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + maxSize_ = value; + } + } + + #region Instance Fields + long minSize_; + long maxSize_ = long.MaxValue; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/StreamUtils.cs b/src/GitHub.Api/SharpZipLib/Core/StreamUtils.cs new file mode 100644 index 000000000..59344d54d --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/StreamUtils.cs @@ -0,0 +1,246 @@ +// StreamUtils.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. 
An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// Provides simple " utilities. + /// + public sealed class StreamUtils + { + /// + /// Read from a ensuring all the required data is read. + /// + /// The stream to read. + /// The buffer to fill. + /// + static public void ReadFully(Stream stream, byte[] buffer) + { + ReadFully(stream, buffer, 0, buffer.Length); + } + + /// + /// Read from a " ensuring all the required data is read. + /// + /// The stream to read data from. + /// The buffer to store data in. + /// The offset at which to begin storing data. + /// The number of bytes of data to store. + /// Required parameter is null + /// and or are invalid. + /// End of stream is encountered before all the data has been read. + static public void ReadFully(Stream stream, byte[] buffer, int offset, int count) + { + if ( stream == null ) { + throw new ArgumentNullException("stream"); + } + + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + // Offset can equal length when buffer and count are 0. + if ( (offset < 0) || (offset > buffer.Length) ) { + throw new ArgumentOutOfRangeException("offset"); + } + + if ( (count < 0) || (offset + count > buffer.Length) ) { + throw new ArgumentOutOfRangeException("count"); + } + + while ( count > 0 ) { + int readCount = stream.Read(buffer, offset, count); + if ( readCount <= 0 ) { + throw new EndOfStreamException(); + } + offset += readCount; + count -= readCount; + } + } + + /// + /// Copy the contents of one to another. + /// + /// The stream to source data from. + /// The stream to write data to. + /// The buffer to use during copying. 
+ static public void Copy(Stream source, Stream destination, byte[] buffer) + { + if (source == null) { + throw new ArgumentNullException("source"); + } + + if (destination == null) { + throw new ArgumentNullException("destination"); + } + + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + // Ensure a reasonable size of buffer is used without being prohibitive. + if (buffer.Length < 128) { + throw new ArgumentException("Buffer is too small", "buffer"); + } + + bool copying = true; + + while (copying) { + int bytesRead = source.Read(buffer, 0, buffer.Length); + if (bytesRead > 0) { + destination.Write(buffer, 0, bytesRead); + } + else { + destination.Flush(); + copying = false; + } + } + } + + /// + /// Copy the contents of one to another. + /// + /// The stream to source data from. + /// The stream to write data to. + /// The buffer to use during copying. + /// The progress handler delegate to use. + /// The minimum between progress updates. + /// The source for this event. + /// The name to use with the event. + /// This form is specialised for use within #Zip to support events during archive operations. + static public void Copy(Stream source, Stream destination, + byte[] buffer, ProgressHandler progressHandler, TimeSpan updateInterval, object sender, string name) + { + Copy(source, destination, buffer, progressHandler, updateInterval, sender, name, -1); + } + + /// + /// Copy the contents of one to another. + /// + /// The stream to source data from. + /// The stream to write data to. + /// The buffer to use during copying. + /// The progress handler delegate to use. + /// The minimum between progress updates. + /// The source for this event. + /// The name to use with the event. + /// A predetermined fixed target value to use with progress updates. + /// If the value is negative the target is calculated by looking at the stream. + /// This form is specialised for use within #Zip to support events during archive operations. 
+ static public void Copy(Stream source, Stream destination, + byte[] buffer, + ProgressHandler progressHandler, TimeSpan updateInterval, + object sender, string name, long fixedTarget) + { + if (source == null) { + throw new ArgumentNullException("source"); + } + + if (destination == null) { + throw new ArgumentNullException("destination"); + } + + if (buffer == null) { + throw new ArgumentNullException("buffer"); + } + + // Ensure a reasonable size of buffer is used without being prohibitive. + if (buffer.Length < 128) { + throw new ArgumentException("Buffer is too small", "buffer"); + } + + if (progressHandler == null) { + throw new ArgumentNullException("progressHandler"); + } + + bool copying = true; + + DateTime marker = DateTime.Now; + long processed = 0; + long target = 0; + + if (fixedTarget >= 0) { + target = fixedTarget; + } + else if (source.CanSeek) { + target = source.Length - source.Position; + } + + // Always fire 0% progress.. + ProgressEventArgs args = new ProgressEventArgs(name, processed, target); + progressHandler(sender, args); + + bool progressFired = true; + + while (copying) { + int bytesRead = source.Read(buffer, 0, buffer.Length); + if (bytesRead > 0) { + processed += bytesRead; + progressFired = false; + destination.Write(buffer, 0, bytesRead); + } + else { + destination.Flush(); + copying = false; + } + + if (DateTime.Now - marker > updateInterval) { + progressFired = true; + marker = DateTime.Now; + args = new ProgressEventArgs(name, processed, target); + progressHandler(sender, args); + + copying = args.ContinueRunning; + } + } + + if (!progressFired) { + args = new ProgressEventArgs(name, processed, target); + progressHandler(sender, args); + } + } + + /// + /// Initialise an instance of + /// + private StreamUtils() + { + // Do nothing. 
+ } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Core/WindowsPathUtils.cs b/src/GitHub.Api/SharpZipLib/Core/WindowsPathUtils.cs new file mode 100644 index 000000000..820461c56 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Core/WindowsPathUtils.cs @@ -0,0 +1,94 @@ +// WindowsPathUtils.cs +// +// Copyright 2007 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. 
If you do not wish to do so, delete this +// exception statement from your version. + +namespace GitHub.ICSharpCode.SharpZipLib.Core +{ + /// + /// WindowsPathUtils provides simple utilities for handling windows paths. + /// + public abstract class WindowsPathUtils + { + /// + /// Initializes a new instance of the class. + /// + internal WindowsPathUtils() + { + } + + /// + /// Remove any path root present in the path + /// + /// A containing path information. + /// The path with the root removed if it was present; path otherwise. + /// Unlike the class the path isnt otherwise checked for validity. + public static string DropPathRoot(string path) + { + string result = path; + + if ( (path != null) && (path.Length > 0) ) { + if ((path[0] == '\\') || (path[0] == '/')) { + // UNC name ? + if ((path.Length > 1) && ((path[1] == '\\') || (path[1] == '/'))) { + int index = 2; + int elements = 2; + + // Scan for two separate elements \\machine\share\restofpath + while ((index <= path.Length) && + (((path[index] != '\\') && (path[index] != '/')) || (--elements > 0))) { + index++; + } + + index++; + + if (index < path.Length) { + result = path.Substring(index); + } + else { + result = ""; + } + } + } + else if ((path.Length > 1) && (path[1] == ':')) { + int dropCount = 2; + if ((path.Length > 2) && ((path[2] == '\\') || (path[2] == '/'))) { + dropCount = 3; + } + result = result.Remove(0, dropCount); + } + } + return result; + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Encryption/PkzipClassic.cs b/src/GitHub.Api/SharpZipLib/Encryption/PkzipClassic.cs new file mode 100644 index 000000000..e030aaa64 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Encryption/PkzipClassic.cs @@ -0,0 +1,498 @@ +// +// PkzipClassic encryption +// +// Copyright 2004 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the 
License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. +// + + +#if !NETCF_1_0 + +using System; +using System.Security.Cryptography; +using GitHub.ICSharpCode.SharpZipLib.Checksums; + +namespace GitHub.ICSharpCode.SharpZipLib.Encryption +{ + /// + /// PkzipClassic embodies the classic or original encryption facilities used in Pkzip archives. 
+ /// While it has been superceded by more recent and more powerful algorithms, its still in use and + /// is viable for preventing casual snooping + /// + public abstract class PkzipClassic : SymmetricAlgorithm + { + /// + /// Generates new encryption keys based on given seed + /// + /// The seed value to initialise keys with. + /// A new key value. + static public byte[] GenerateKeys(byte[] seed) + { + if ( seed == null ) { + throw new ArgumentNullException("seed"); + } + + if ( seed.Length == 0 ) { + throw new ArgumentException("Length is zero", "seed"); + } + + uint[] newKeys = new uint[] { + 0x12345678, + 0x23456789, + 0x34567890 + }; + + for (int i = 0; i < seed.Length; ++i) { + newKeys[0] = Crc32.ComputeCrc32(newKeys[0], seed[i]); + newKeys[1] = newKeys[1] + (byte)newKeys[0]; + newKeys[1] = newKeys[1] * 134775813 + 1; + newKeys[2] = Crc32.ComputeCrc32(newKeys[2], (byte)(newKeys[1] >> 24)); + } + + byte[] result = new byte[12]; + result[0] = (byte)(newKeys[0] & 0xff); + result[1] = (byte)((newKeys[0] >> 8) & 0xff); + result[2] = (byte)((newKeys[0] >> 16) & 0xff); + result[3] = (byte)((newKeys[0] >> 24) & 0xff); + result[4] = (byte)(newKeys[1] & 0xff); + result[5] = (byte)((newKeys[1] >> 8) & 0xff); + result[6] = (byte)((newKeys[1] >> 16) & 0xff); + result[7] = (byte)((newKeys[1] >> 24) & 0xff); + result[8] = (byte)(newKeys[2] & 0xff); + result[9] = (byte)((newKeys[2] >> 8) & 0xff); + result[10] = (byte)((newKeys[2] >> 16) & 0xff); + result[11] = (byte)((newKeys[2] >> 24) & 0xff); + return result; + } + } + + /// + /// PkzipClassicCryptoBase provides the low level facilities for encryption + /// and decryption using the PkzipClassic algorithm. + /// + class PkzipClassicCryptoBase + { + /// + /// Transform a single byte + /// + /// + /// The transformed value + /// + protected byte TransformByte() + { + uint temp = ((keys[2] & 0xFFFF) | 2); + return (byte)((temp * (temp ^ 1)) >> 8); + } + + /// + /// Set the key schedule for encryption/decryption. 
+ /// + /// The data use to set the keys from. + protected void SetKeys(byte[] keyData) + { + if ( keyData == null ) { + throw new ArgumentNullException("keyData"); + } + + if ( keyData.Length != 12 ) { + throw new InvalidOperationException("Key length is not valid"); + } + + keys = new uint[3]; + keys[0] = (uint)((keyData[3] << 24) | (keyData[2] << 16) | (keyData[1] << 8) | keyData[0]); + keys[1] = (uint)((keyData[7] << 24) | (keyData[6] << 16) | (keyData[5] << 8) | keyData[4]); + keys[2] = (uint)((keyData[11] << 24) | (keyData[10] << 16) | (keyData[9] << 8) | keyData[8]); + } + + /// + /// Update encryption keys + /// + protected void UpdateKeys(byte ch) + { + keys[0] = Crc32.ComputeCrc32(keys[0], ch); + keys[1] = keys[1] + (byte)keys[0]; + keys[1] = keys[1] * 134775813 + 1; + keys[2] = Crc32.ComputeCrc32(keys[2], (byte)(keys[1] >> 24)); + } + + /// + /// Reset the internal state. + /// + protected void Reset() + { + keys[0] = 0; + keys[1] = 0; + keys[2] = 0; + } + + #region Instance Fields + uint[] keys; + #endregion + } + + /// + /// PkzipClassic CryptoTransform for encryption. + /// + class PkzipClassicEncryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform + { + /// + /// Initialise a new instance of + /// + /// The key block to use. + internal PkzipClassicEncryptCryptoTransform(byte[] keyBlock) + { + SetKeys(keyBlock); + } + + #region ICryptoTransform Members + + /// + /// Transforms the specified region of the specified byte array. + /// + /// The input for which to compute the transform. + /// The offset into the byte array from which to begin using data. + /// The number of bytes in the byte array to use as data. + /// The computed transform. 
+ public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) + { + byte[] result = new byte[inputCount]; + TransformBlock(inputBuffer, inputOffset, inputCount, result, 0); + return result; + } + + /// + /// Transforms the specified region of the input byte array and copies + /// the resulting transform to the specified region of the output byte array. + /// + /// The input for which to compute the transform. + /// The offset into the input byte array from which to begin using data. + /// The number of bytes in the input byte array to use as data. + /// The output to which to write the transform. + /// The offset into the output byte array from which to begin writing data. + /// The number of bytes written. + public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) + { + for (int i = inputOffset; i < inputOffset + inputCount; ++i) { + byte oldbyte = inputBuffer[i]; + outputBuffer[outputOffset++] = (byte)(inputBuffer[i] ^ TransformByte()); + UpdateKeys(oldbyte); + } + return inputCount; + } + + /// + /// Gets a value indicating whether the current transform can be reused. + /// + public bool CanReuseTransform + { + get { + return true; + } + } + + /// + /// Gets the size of the input data blocks in bytes. + /// + public int InputBlockSize + { + get { + return 1; + } + } + + /// + /// Gets the size of the output data blocks in bytes. + /// + public int OutputBlockSize + { + get { + return 1; + } + } + + /// + /// Gets a value indicating whether multiple blocks can be transformed. + /// + public bool CanTransformMultipleBlocks + { + get { + return true; + } + } + + #endregion + + #region IDisposable Members + + /// + /// Cleanup internal state. + /// + public void Dispose() + { + Reset(); + } + + #endregion + } + + + /// + /// PkzipClassic CryptoTransform for decryption. 
+ /// + class PkzipClassicDecryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform + { + /// + /// Initialise a new instance of . + /// + /// The key block to decrypt with. + internal PkzipClassicDecryptCryptoTransform(byte[] keyBlock) + { + SetKeys(keyBlock); + } + + #region ICryptoTransform Members + + /// + /// Transforms the specified region of the specified byte array. + /// + /// The input for which to compute the transform. + /// The offset into the byte array from which to begin using data. + /// The number of bytes in the byte array to use as data. + /// The computed transform. + public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) + { + byte[] result = new byte[inputCount]; + TransformBlock(inputBuffer, inputOffset, inputCount, result, 0); + return result; + } + + /// + /// Transforms the specified region of the input byte array and copies + /// the resulting transform to the specified region of the output byte array. + /// + /// The input for which to compute the transform. + /// The offset into the input byte array from which to begin using data. + /// The number of bytes in the input byte array to use as data. + /// The output to which to write the transform. + /// The offset into the output byte array from which to begin writing data. + /// The number of bytes written. + public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) + { + for (int i = inputOffset; i < inputOffset + inputCount; ++i) { + byte newByte = (byte)(inputBuffer[i] ^ TransformByte()); + outputBuffer[outputOffset++] = newByte; + UpdateKeys(newByte); + } + return inputCount; + } + + /// + /// Gets a value indicating whether the current transform can be reused. + /// + public bool CanReuseTransform + { + get { + return true; + } + } + + /// + /// Gets the size of the input data blocks in bytes. 
+ /// + public int InputBlockSize + { + get { + return 1; + } + } + + /// + /// Gets the size of the output data blocks in bytes. + /// + public int OutputBlockSize + { + get { + return 1; + } + } + + /// + /// Gets a value indicating whether multiple blocks can be transformed. + /// + public bool CanTransformMultipleBlocks + { + get { + return true; + } + } + + #endregion + + #region IDisposable Members + + /// + /// Cleanup internal state. + /// + public void Dispose() + { + Reset(); + } + + #endregion + } + + /// + /// Defines a wrapper object to access the Pkzip algorithm. + /// This class cannot be inherited. + /// + public sealed class PkzipClassicManaged : PkzipClassic + { + /// + /// Get / set the applicable block size in bits. + /// + /// The only valid block size is 8. + public override int BlockSize + { + get { + return 8; + } + + set { + if (value != 8) { + throw new CryptographicException("Block size is invalid"); + } + } + } + + /// + /// Get an array of legal key sizes. + /// + public override KeySizes[] LegalKeySizes + { + get { + KeySizes[] keySizes = new KeySizes[1]; + keySizes[0] = new KeySizes(12 * 8, 12 * 8, 0); + return keySizes; + } + } + + /// + /// Generate an initial vector. + /// + public override void GenerateIV() + { + // Do nothing. + } + + /// + /// Get an array of legal block sizes. + /// + public override KeySizes[] LegalBlockSizes + { + get { + KeySizes[] keySizes = new KeySizes[1]; + keySizes[0] = new KeySizes(1 * 8, 1 * 8, 0); + return keySizes; + } + } + + /// + /// Get / set the key value applicable. + /// + public override byte[] Key + { + get { + if ( key_ == null ) { + GenerateKey(); + } + + return (byte[]) key_.Clone(); + } + + set { + if ( value == null ) { + throw new ArgumentNullException("value"); + } + + if ( value.Length != 12 ) { + throw new CryptographicException("Key size is illegal"); + } + + key_ = (byte[]) value.Clone(); + } + } + + /// + /// Generate a new random key. 
+ /// + public override void GenerateKey() + { + key_ = new byte[12]; + Random rnd = new Random(); + rnd.NextBytes(key_); + } + + /// + /// Create an encryptor. + /// + /// The key to use for this encryptor. + /// Initialisation vector for the new encryptor. + /// Returns a new PkzipClassic encryptor + public override ICryptoTransform CreateEncryptor( + byte[] rgbKey, + byte[] rgbIV) + { + key_ = rgbKey; + return new PkzipClassicEncryptCryptoTransform(Key); + } + + /// + /// Create a decryptor. + /// + /// Keys to use for this new decryptor. + /// Initialisation vector for the new decryptor. + /// Returns a new decryptor. + public override ICryptoTransform CreateDecryptor( + byte[] rgbKey, + byte[] rgbIV) + { + key_ = rgbKey; + return new PkzipClassicDecryptCryptoTransform(Key); + } + + #region Instance Fields + byte[] key_; + #endregion + } +} +#endif diff --git a/src/GitHub.Api/SharpZipLib/Encryption/ZipAESStream.cs b/src/GitHub.Api/SharpZipLib/Encryption/ZipAESStream.cs new file mode 100644 index 000000000..8721b4a92 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Encryption/ZipAESStream.cs @@ -0,0 +1,170 @@ +// +// ZipAESStream.cs +// +// Copyright 2009 David Pierson +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
+// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. +// + +#if !NET_1_1 && !NETCF_2_0 + +using System; +using System.IO; +using System.Security.Cryptography; + +namespace GitHub.ICSharpCode.SharpZipLib.Encryption { + + // Based on information from http://www.winzip.com/aes_info.htm + // and http://www.gladman.me.uk/cryptography_technology/fileencrypt/ + + /// + /// Encrypts and decrypts AES ZIP + /// + internal class ZipAESStream : CryptoStream { + + /// + /// Constructor + /// + /// The stream on which to perform the cryptographic transformation. + /// Instance of ZipAESTransform + /// Read or Write + public ZipAESStream(Stream stream, ZipAESTransform transform, CryptoStreamMode mode) + : base(stream, transform, mode) { + + _stream = stream; + _transform = transform; + _slideBuffer = new byte[1024]; + + _blockAndAuth = CRYPTO_BLOCK_SIZE + AUTH_CODE_LENGTH; + + // mode: + // CryptoStreamMode.Read means we read from "stream" and pass decrypted to our Read() method. + // Write bypasses this stream and uses the Transform directly. 
+ if (mode != CryptoStreamMode.Read) { + throw new Exception("ZipAESStream only for read"); + } + } + + // The final n bytes of the AES stream contain the Auth Code. + private const int AUTH_CODE_LENGTH = 10; + + private Stream _stream; + private ZipAESTransform _transform; + private byte[] _slideBuffer; + private int _slideBufStartPos; + private int _slideBufFreePos; + // Blocksize is always 16 here, even for AES-256 which has transform.InputBlockSize of 32. + private const int CRYPTO_BLOCK_SIZE = 16; + private int _blockAndAuth; + + /// + /// Reads a sequence of bytes from the current CryptoStream into buffer, + /// and advances the position within the stream by the number of bytes read. + /// + public override int Read(byte[] outBuffer, int offset, int count) { + int nBytes = 0; + while (nBytes < count) { + // Calculate buffer quantities vs read-ahead size, and check for sufficient free space + int byteCount = _slideBufFreePos - _slideBufStartPos; + + // Need to handle final block and Auth Code specially, but don't know total data length. + // Maintain a read-ahead equal to the length of (crypto block + Auth Code). + // When that runs out we can detect these final sections. + int lengthToRead = _blockAndAuth - byteCount; + if (_slideBuffer.Length - _slideBufFreePos < lengthToRead) { + // Shift the data to the beginning of the buffer + int iTo = 0; + for (int iFrom = _slideBufStartPos; iFrom < _slideBufFreePos; iFrom++, iTo++) { + _slideBuffer[iTo] = _slideBuffer[iFrom]; + } + _slideBufFreePos -= _slideBufStartPos; // Note the -= + _slideBufStartPos = 0; + } + int obtained = _stream.Read(_slideBuffer, _slideBufFreePos, lengthToRead); + _slideBufFreePos += obtained; + + // Recalculate how much data we now have + byteCount = _slideBufFreePos - _slideBufStartPos; + if (byteCount >= _blockAndAuth) { + // At least a 16 byte block and an auth code remains. 
+ _transform.TransformBlock(_slideBuffer, + _slideBufStartPos, + CRYPTO_BLOCK_SIZE, + outBuffer, + offset); + nBytes += CRYPTO_BLOCK_SIZE; + offset += CRYPTO_BLOCK_SIZE; + _slideBufStartPos += CRYPTO_BLOCK_SIZE; + } else { + // Last round. + if (byteCount > AUTH_CODE_LENGTH) { + // At least one byte of data plus auth code + int finalBlock = byteCount - AUTH_CODE_LENGTH; + _transform.TransformBlock(_slideBuffer, + _slideBufStartPos, + finalBlock, + outBuffer, + offset); + + nBytes += finalBlock; + _slideBufStartPos += finalBlock; + } + else if (byteCount < AUTH_CODE_LENGTH) + throw new Exception("Internal error missed auth code"); // Coding bug + // Final block done. Check Auth code. + byte[] calcAuthCode = _transform.GetAuthCode(); + for (int i = 0; i < AUTH_CODE_LENGTH; i++) { + if (calcAuthCode[i] != _slideBuffer[_slideBufStartPos + i]) { + throw new Exception("AES Authentication Code does not match. This is a super-CRC check on the data in the file after compression and encryption. \r\n" + + "The file may be damaged."); + } + } + + break; // Reached the auth code + } + } + return nBytes; + } + + /// + /// Writes a sequence of bytes to the current stream and advances the current position within this stream by the number of bytes written. + /// + /// An array of bytes. This method copies count bytes from buffer to the current stream. + /// The byte offset in buffer at which to begin copying bytes to the current stream. + /// The number of bytes to be written to the current stream. + public override void Write(byte[] buffer, int offset, int count) { + // ZipAESStream is used for reading but not for writing. Writing uses the ZipAESTransform directly. 
+ throw new NotImplementedException(); + } + } +} +#endif \ No newline at end of file diff --git a/src/GitHub.Api/SharpZipLib/Encryption/ZipAESTransform.cs b/src/GitHub.Api/SharpZipLib/Encryption/ZipAESTransform.cs new file mode 100644 index 000000000..002036cc3 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Encryption/ZipAESTransform.cs @@ -0,0 +1,219 @@ +// +// ZipAESTransform.cs +// +// Copyright 2009 David Pierson +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. 
If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. +// + +#if !NET_1_1 && !NETCF_2_0 +// Framework version 2.0 required for Rfc2898DeriveBytes + +using System; +using System.Security.Cryptography; + +namespace GitHub.ICSharpCode.SharpZipLib.Encryption { + + /// + /// Transforms stream using AES in CTR mode + /// + internal class ZipAESTransform : ICryptoTransform { + + private const int PWD_VER_LENGTH = 2; + + // WinZip use iteration count of 1000 for PBKDF2 key generation + private const int KEY_ROUNDS = 1000; + + // For 128-bit AES (16 bytes) the encryption is implemented as expected. + // For 256-bit AES (32 bytes) WinZip do full 256 bit AES of the nonce to create the encryption + // block but use only the first 16 bytes of it, and discard the second half. + private const int ENCRYPT_BLOCK = 16; + + private int _blockSize; + private ICryptoTransform _encryptor; + private readonly byte[] _counterNonce; + private byte[] _encryptBuffer; + private int _encrPos; + private byte[] _pwdVerifier; + private HMACSHA1 _hmacsha1; + private bool _finalised; + + private bool _writeMode; + + /// + /// Constructor. + /// + /// Password string + /// Random bytes, length depends on encryption strength. + /// 128 bits = 8 bytes, 192 bits = 12 bytes, 256 bits = 16 bytes. + /// The encryption strength, in bytes eg 16 for 128 bits. + /// True when creating a zip, false when reading. For the AuthCode. + /// + public ZipAESTransform(string key, byte[] saltBytes, int blockSize, bool writeMode) { + + if (blockSize != 16 && blockSize != 32) // 24 valid for AES but not supported by Winzip + throw new Exception("Invalid blocksize " + blockSize + ". Must be 16 or 32."); + if (saltBytes.Length != blockSize / 2) + throw new Exception("Invalid salt len. 
Must be " + blockSize / 2 + " for blocksize " + blockSize); + // initialise the encryption buffer and buffer pos + _blockSize = blockSize; + _encryptBuffer = new byte[_blockSize]; + _encrPos = ENCRYPT_BLOCK; + + // Performs the equivalent of derive_key in Dr Brian Gladman's pwd2key.c + Rfc2898DeriveBytes pdb = new Rfc2898DeriveBytes(key, saltBytes, KEY_ROUNDS); + RijndaelManaged rm = new RijndaelManaged(); + rm.Mode = CipherMode.ECB; // No feedback from cipher for CTR mode + _counterNonce = new byte[_blockSize]; + byte[] byteKey1 = pdb.GetBytes(_blockSize); + byte[] byteKey2 = pdb.GetBytes(_blockSize); + _encryptor = rm.CreateEncryptor(byteKey1, byteKey2); + _pwdVerifier = pdb.GetBytes(PWD_VER_LENGTH); + // + _hmacsha1 = new HMACSHA1(byteKey2); + _writeMode = writeMode; + } + + /// + /// Implement the ICryptoTransform method. + /// + public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) { + + // Pass the data stream to the hash algorithm for generating the Auth Code. + // This does not change the inputBuffer. Do this before decryption for read mode. + if (!_writeMode) { + _hmacsha1.TransformBlock(inputBuffer, inputOffset, inputCount, inputBuffer, inputOffset); + } + // Encrypt with AES in CTR mode. Regards to Dr Brian Gladman for this. + int ix = 0; + while (ix < inputCount) { + if (_encrPos == ENCRYPT_BLOCK) { + /* increment encryption nonce */ + int j = 0; + while (++_counterNonce[j] == 0) { + ++j; + } + /* encrypt the nonce to form next xor buffer */ + _encryptor.TransformBlock(_counterNonce, 0, _blockSize, _encryptBuffer, 0); + _encrPos = 0; + } + outputBuffer[ix + outputOffset] = (byte)(inputBuffer[ix + inputOffset] ^ _encryptBuffer[_encrPos++]); + // + ix++; + } + if (_writeMode) { + // This does not change the buffer. 
+ _hmacsha1.TransformBlock(outputBuffer, outputOffset, inputCount, outputBuffer, outputOffset); + } + return inputCount; + } + + /// + /// Returns the 2 byte password verifier + /// + public byte[] PwdVerifier { + get { + return _pwdVerifier; + } + } + + /// + /// Returns the 10 byte AUTH CODE to be checked or appended immediately following the AES data stream. + /// + public byte[] GetAuthCode() { + // We usually don't get advance notice of final block. Hash requres a TransformFinal. + if (!_finalised) { + byte[] dummy = new byte[0]; + _hmacsha1.TransformFinalBlock(dummy, 0, 0); + _finalised = true; + } + return _hmacsha1.Hash; + } + + #region ICryptoTransform Members + + /// + /// Not implemented. + /// + public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) { + + throw new NotImplementedException("ZipAESTransform.TransformFinalBlock"); + } + + /// + /// Gets the size of the input data blocks in bytes. + /// + public int InputBlockSize { + get { + return _blockSize; + } + } + + /// + /// Gets the size of the output data blocks in bytes. + /// + public int OutputBlockSize { + get { + return _blockSize; + } + } + + /// + /// Gets a value indicating whether multiple blocks can be transformed. + /// + public bool CanTransformMultipleBlocks { + get { + return true; + } + } + + /// + /// Gets a value indicating whether the current transform can be reused. + /// + public bool CanReuseTransform { + get { + return true; + } + } + + /// + /// Cleanup internal state. 
+ /// + public void Dispose() { + _encryptor.Dispose(); + } + + #endregion + + } +} +#endif \ No newline at end of file diff --git a/src/GitHub.Api/SharpZipLib/SharpZipBaseException.cs b/src/GitHub.Api/SharpZipLib/SharpZipBaseException.cs new file mode 100644 index 000000000..227a951cb --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/SharpZipBaseException.cs @@ -0,0 +1,94 @@ +// SharpZipBaseException.cs +// +// Copyright 2004 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. 
If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +#if !NETCF_1_0 && !NETCF_2_0 +using System.Runtime.Serialization; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib +{ + /// + /// SharpZipBaseException is the base exception class for the SharpZipLibrary. + /// All library exceptions are derived from this. + /// + /// NOTE: Not all exceptions thrown will be derived from this class. + /// A variety of other exceptions are possible for example +#if !NETCF_1_0 && !NETCF_2_0 + [Serializable] +#endif + public class SharpZipBaseException : ApplicationException + { +#if !NETCF_1_0 && !NETCF_2_0 + /// + /// Deserialization constructor + /// + /// for this constructor + /// for this constructor + protected SharpZipBaseException(SerializationInfo info, StreamingContext context ) + : base( info, context ) + { + } +#endif + + /// + /// Initializes a new instance of the SharpZipBaseException class. + /// + public SharpZipBaseException() + { + } + + /// + /// Initializes a new instance of the SharpZipBaseException class with a specified error message. + /// + /// A message describing the exception. + public SharpZipBaseException(string message) + : base(message) + { + } + + /// + /// Initializes a new instance of the SharpZipBaseException class with a specified + /// error message and a reference to the inner exception that is the cause of this exception. + /// + /// A message describing the exception. 
+ /// The inner exception + public SharpZipBaseException(string message, Exception innerException) + : base(message, innerException) + { + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Deflater.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Deflater.cs new file mode 100644 index 000000000..3ba505385 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Deflater.cs @@ -0,0 +1,557 @@ +// Deflater.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// This is the Deflater class. The deflater class compresses input + /// with the deflate algorithm described in RFC 1951. It has several + /// compression levels and three different strategies described below. + /// + /// This class is not thread safe. This is inherent in the API, due + /// to the split of deflate and setInput. + /// + /// author of the original java version : Jochen Hoenicke + /// + public class Deflater + { + #region Deflater Documentation + /* + * The Deflater can do the following state transitions: + * + * (1) -> INIT_STATE ----> INIT_FINISHING_STATE ---. + * / | (2) (5) | + * / v (5) | + * (3)| SETDICT_STATE ---> SETDICT_FINISHING_STATE |(3) + * \ | (3) | ,--------' + * | | | (3) / + * v v (5) v v + * (1) -> BUSY_STATE ----> FINISHING_STATE + * | (6) + * v + * FINISHED_STATE + * \_____________________________________/ + * | (7) + * v + * CLOSED_STATE + * + * (1) If we should produce a header we start in INIT_STATE, otherwise + * we start in BUSY_STATE. + * (2) A dictionary may be set only when we are in INIT_STATE, then + * we change the state as indicated. 
+ * (3) Whether a dictionary is set or not, on the first call of deflate + * we change to BUSY_STATE. + * (4) -- intentionally left blank -- :) + * (5) FINISHING_STATE is entered, when flush() is called to indicate that + * there is no more INPUT. There are also states indicating, that + * the header wasn't written yet. + * (6) FINISHED_STATE is entered, when everything has been flushed to the + * internal pending output buffer. + * (7) At any time (7) + * + */ + #endregion + #region Public Constants + /// + /// The best and slowest compression level. This tries to find very + /// long and distant string repetitions. + /// + public const int BEST_COMPRESSION = 9; + + /// + /// The worst but fastest compression level. + /// + public const int BEST_SPEED = 1; + + /// + /// The default compression level. + /// + public const int DEFAULT_COMPRESSION = -1; + + /// + /// This level won't compress at all but output uncompressed blocks. + /// + public const int NO_COMPRESSION = 0; + + /// + /// The compression method. This is the only method supported so far. + /// There is no need to use this constant at all. + /// + public const int DEFLATED = 8; + #endregion + #region Local Constants + private const int IS_SETDICT = 0x01; + private const int IS_FLUSHING = 0x04; + private const int IS_FINISHING = 0x08; + + private const int INIT_STATE = 0x00; + private const int SETDICT_STATE = 0x01; + // private static int INIT_FINISHING_STATE = 0x08; + // private static int SETDICT_FINISHING_STATE = 0x09; + private const int BUSY_STATE = 0x10; + private const int FLUSHING_STATE = 0x14; + private const int FINISHING_STATE = 0x1c; + private const int FINISHED_STATE = 0x1e; + private const int CLOSED_STATE = 0x7f; + #endregion + #region Constructors + /// + /// Creates a new deflater with default compression level. + /// + public Deflater() : this(DEFAULT_COMPRESSION, false) + { + + } + + /// + /// Creates a new deflater with given compression level. 
+ /// + /// + /// the compression level, a value between NO_COMPRESSION + /// and BEST_COMPRESSION, or DEFAULT_COMPRESSION. + /// + /// if lvl is out of range. + public Deflater(int level) : this(level, false) + { + + } + + /// + /// Creates a new deflater with given compression level. + /// + /// + /// the compression level, a value between NO_COMPRESSION + /// and BEST_COMPRESSION. + /// + /// + /// true, if we should suppress the Zlib/RFC1950 header at the + /// beginning and the adler checksum at the end of the output. This is + /// useful for the GZIP/PKZIP formats. + /// + /// if lvl is out of range. + public Deflater(int level, bool noZlibHeaderOrFooter) + { + if (level == DEFAULT_COMPRESSION) { + level = 6; + } else if (level < NO_COMPRESSION || level > BEST_COMPRESSION) { + throw new ArgumentOutOfRangeException("level"); + } + + pending = new DeflaterPending(); + engine = new DeflaterEngine(pending); + this.noZlibHeaderOrFooter = noZlibHeaderOrFooter; + SetStrategy(DeflateStrategy.Default); + SetLevel(level); + Reset(); + } + #endregion + + /// + /// Resets the deflater. The deflater acts afterwards as if it was + /// just created with the same compression level and strategy as it + /// had before. + /// + public void Reset() + { + state = (noZlibHeaderOrFooter ? BUSY_STATE : INIT_STATE); + totalOut = 0; + pending.Reset(); + engine.Reset(); + } + + /// + /// Gets the current adler checksum of the data that was processed so far. + /// + public int Adler { + get { + return engine.Adler; + } + } + + /// + /// Gets the number of input bytes processed so far. + /// + public long TotalIn { + get { + return engine.TotalIn; + } + } + + /// + /// Gets the number of output bytes so far. + /// + public long TotalOut { + get { + return totalOut; + } + } + + /// + /// Flushes the current input block. Further calls to deflate() will + /// produce enough output to inflate everything in the current input + /// block. 
This is not part of Sun's JDK so I have made it package + /// private. It is used by DeflaterOutputStream to implement + /// flush(). + /// + public void Flush() + { + state |= IS_FLUSHING; + } + + /// + /// Finishes the deflater with the current input block. It is an error + /// to give more input after this method was called. This method must + /// be called to force all bytes to be flushed. + /// + public void Finish() + { + state |= (IS_FLUSHING | IS_FINISHING); + } + + /// + /// Returns true if the stream was finished and no more output bytes + /// are available. + /// + public bool IsFinished { + get { + return (state == FINISHED_STATE) && pending.IsFlushed; + } + } + + /// + /// Returns true, if the input buffer is empty. + /// You should then call setInput(). + /// NOTE: This method can also return true when the stream + /// was finished. + /// + public bool IsNeedingInput { + get { + return engine.NeedsInput(); + } + } + + /// + /// Sets the data which should be compressed next. This should be only + /// called when needsInput indicates that more input is needed. + /// If you call setInput when needsInput() returns false, the + /// previous input that is still pending will be thrown away. + /// The given byte array should not be changed, before needsInput() returns + /// true again. + /// This call is equivalent to setInput(input, 0, input.length). + /// + /// + /// the buffer containing the input data. + /// + /// + /// if the buffer was finished() or ended(). + /// + public void SetInput(byte[] input) + { + SetInput(input, 0, input.Length); + } + + /// + /// Sets the data which should be compressed next. This should be + /// only called when needsInput indicates that more input is needed. + /// The given byte array should not be changed, before needsInput() returns + /// true again. + /// + /// + /// the buffer containing the input data. + /// + /// + /// the start of the data. + /// + /// + /// the number of data bytes of input. 
+ /// + /// + /// if the buffer was Finish()ed or if previous input is still pending. + /// + public void SetInput(byte[] input, int offset, int count) + { + if ((state & IS_FINISHING) != 0) { + throw new InvalidOperationException("Finish() already called"); + } + engine.SetInput(input, offset, count); + } + + /// + /// Sets the compression level. There is no guarantee of the exact + /// position of the change, but if you call this when needsInput is + /// true the change of compression level will occur somewhere near + /// before the end of the so far given input. + /// + /// + /// the new compression level. + /// + public void SetLevel(int level) + { + if (level == DEFAULT_COMPRESSION) { + level = 6; + } else if (level < NO_COMPRESSION || level > BEST_COMPRESSION) { + throw new ArgumentOutOfRangeException("level"); + } + + if (this.level != level) { + this.level = level; + engine.SetLevel(level); + } + } + + /// + /// Get current compression level + /// + /// Returns the current compression level + public int GetLevel() { + return level; + } + + /// + /// Sets the compression strategy. Strategy is one of + /// DEFAULT_STRATEGY, HUFFMAN_ONLY and FILTERED. For the exact + /// position where the strategy is changed, the same as for + /// SetLevel() applies. + /// + /// + /// The new compression strategy. + /// + public void SetStrategy(DeflateStrategy strategy) + { + engine.Strategy = strategy; + } + + /// + /// Deflates the current input block with to the given array. + /// + /// + /// The buffer where compressed data is stored + /// + /// + /// The number of compressed bytes added to the output, or 0 if either + /// IsNeedingInput() or IsFinished returns true or length is zero. + /// + public int Deflate(byte[] output) + { + return Deflate(output, 0, output.Length); + } + + /// + /// Deflates the current input block to the given array. + /// + /// + /// Buffer to store the compressed data. + /// + /// + /// Offset into the output array. 
+ /// + /// + /// The maximum number of bytes that may be stored. + /// + /// + /// The number of compressed bytes added to the output, or 0 if either + /// needsInput() or finished() returns true or length is zero. + /// + /// + /// If Finish() was previously called. + /// + /// + /// If offset or length don't match the array length. + /// + public int Deflate(byte[] output, int offset, int length) + { + int origLength = length; + + if (state == CLOSED_STATE) { + throw new InvalidOperationException("Deflater closed"); + } + + if (state < BUSY_STATE) { + // output header + int header = (DEFLATED + + ((DeflaterConstants.MAX_WBITS - 8) << 4)) << 8; + int level_flags = (level - 1) >> 1; + if (level_flags < 0 || level_flags > 3) { + level_flags = 3; + } + header |= level_flags << 6; + if ((state & IS_SETDICT) != 0) { + // Dictionary was set + header |= DeflaterConstants.PRESET_DICT; + } + header += 31 - (header % 31); + + pending.WriteShortMSB(header); + if ((state & IS_SETDICT) != 0) { + int chksum = engine.Adler; + engine.ResetAdler(); + pending.WriteShortMSB(chksum >> 16); + pending.WriteShortMSB(chksum & 0xffff); + } + + state = BUSY_STATE | (state & (IS_FLUSHING | IS_FINISHING)); + } + + for (;;) { + int count = pending.Flush(output, offset, length); + offset += count; + totalOut += count; + length -= count; + + if (length == 0 || state == FINISHED_STATE) { + break; + } + + if (!engine.Deflate((state & IS_FLUSHING) != 0, (state & IS_FINISHING) != 0)) { + if (state == BUSY_STATE) { + // We need more input now + return origLength - length; + } else if (state == FLUSHING_STATE) { + if (level != NO_COMPRESSION) { + /* We have to supply some lookahead. 8 bit lookahead + * is needed by the zlib inflater, and we must fill + * the next byte, so that all bits are flushed. 
+ */ + int neededbits = 8 + ((-pending.BitCount) & 7); + while (neededbits > 0) { + /* write a static tree block consisting solely of + * an EOF: + */ + pending.WriteBits(2, 10); + neededbits -= 10; + } + } + state = BUSY_STATE; + } else if (state == FINISHING_STATE) { + pending.AlignToByte(); + + // Compressed data is complete. Write footer information if required. + if (!noZlibHeaderOrFooter) { + int adler = engine.Adler; + pending.WriteShortMSB(adler >> 16); + pending.WriteShortMSB(adler & 0xffff); + } + state = FINISHED_STATE; + } + } + } + return origLength - length; + } + + /// + /// Sets the dictionary which should be used in the deflate process. + /// This call is equivalent to setDictionary(dict, 0, dict.Length). + /// + /// + /// the dictionary. + /// + /// + /// if SetInput () or Deflate () were already called or another dictionary was already set. + /// + public void SetDictionary(byte[] dictionary) + { + SetDictionary(dictionary, 0, dictionary.Length); + } + + /// + /// Sets the dictionary which should be used in the deflate process. + /// The dictionary is a byte array containing strings that are + /// likely to occur in the data which should be compressed. The + /// dictionary is not stored in the compressed output, only a + /// checksum. To decompress the output you need to supply the same + /// dictionary again. + /// + /// + /// The dictionary data + /// + /// + /// The index where dictionary information commences. + /// + /// + /// The number of bytes in the dictionary. + /// + /// + /// If SetInput () or Deflate() were already called or another dictionary was already set. + /// + public void SetDictionary(byte[] dictionary, int index, int count) + { + if (state != INIT_STATE) { + throw new InvalidOperationException(); + } + + state = SETDICT_STATE; + engine.SetDictionary(dictionary, index, count); + } + + #region Instance Fields + /// + /// Compression level. 
+ /// + int level; + + /// + /// If true no Zlib/RFC1950 headers or footers are generated + /// + bool noZlibHeaderOrFooter; + + /// + /// The current state. + /// + int state; + + /// + /// The total bytes of output written. + /// + long totalOut; + + /// + /// The pending output. + /// + DeflaterPending pending; + + /// + /// The deflater engine. + /// + DeflaterEngine engine; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterConstants.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterConstants.cs new file mode 100644 index 000000000..abc68cbac --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterConstants.cs @@ -0,0 +1,186 @@ +// DeflaterConstants.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// This class contains constants used for deflation. + /// + public class DeflaterConstants + { + /// + /// Set to true to enable debugging + /// + public const bool DEBUGGING = false; + + /// + /// Written to Zip file to identify a stored block + /// + public const int STORED_BLOCK = 0; + + /// + /// Identifies static tree in Zip file + /// + public const int STATIC_TREES = 1; + + /// + /// Identifies dynamic tree in Zip file + /// + public const int DYN_TREES = 2; + + /// + /// Header flag indicating a preset dictionary for deflation + /// + public const int PRESET_DICT = 0x20; + + /// + /// Sets internal buffer sizes for Huffman encoding + /// + public const int DEFAULT_MEM_LEVEL = 8; + + /// + /// Internal compression engine constant + /// + public const int MAX_MATCH = 258; + + /// + /// Internal compression engine constant + /// + public const int MIN_MATCH = 3; + + /// + /// Internal compression engine constant + /// + public const int MAX_WBITS = 15; + + /// + /// Internal compression engine constant + /// + public const int WSIZE = 1 << MAX_WBITS; + + /// + /// Internal compression engine constant + /// + public const int 
WMASK = WSIZE - 1; + + /// + /// Internal compression engine constant + /// + public const int HASH_BITS = DEFAULT_MEM_LEVEL + 7; + + /// + /// Internal compression engine constant + /// + public const int HASH_SIZE = 1 << HASH_BITS; + + /// + /// Internal compression engine constant + /// + public const int HASH_MASK = HASH_SIZE - 1; + + /// + /// Internal compression engine constant + /// + public const int HASH_SHIFT = (HASH_BITS + MIN_MATCH - 1) / MIN_MATCH; + + /// + /// Internal compression engine constant + /// + public const int MIN_LOOKAHEAD = MAX_MATCH + MIN_MATCH + 1; + + /// + /// Internal compression engine constant + /// + public const int MAX_DIST = WSIZE - MIN_LOOKAHEAD; + + /// + /// Internal compression engine constant + /// + public const int PENDING_BUF_SIZE = 1 << (DEFAULT_MEM_LEVEL + 8); + + /// + /// Internal compression engine constant + /// + public static int MAX_BLOCK_SIZE = Math.Min(65535, PENDING_BUF_SIZE - 5); + + /// + /// Internal compression engine constant + /// + public const int DEFLATE_STORED = 0; + + /// + /// Internal compression engine constant + /// + public const int DEFLATE_FAST = 1; + + /// + /// Internal compression engine constant + /// + public const int DEFLATE_SLOW = 2; + + /// + /// Internal compression engine constant + /// + public static int[] GOOD_LENGTH = { 0, 4, 4, 4, 4, 8, 8, 8, 32, 32 }; + + /// + /// Internal compression engine constant + /// + public static int[] MAX_LAZY = { 0, 4, 5, 6, 4, 16, 16, 32, 128, 258 }; + + /// + /// Internal compression engine constant + /// + public static int[] NICE_LENGTH = { 0, 8, 16, 32, 16, 32, 128, 128, 258, 258 }; + + /// + /// Internal compression engine constant + /// + public static int[] MAX_CHAIN = { 0, 4, 8, 32, 16, 32, 128, 256, 1024, 4096 }; + + /// + /// Internal compression engine constant + /// + public static int[] COMPR_FUNC = { 0, 1, 1, 1, 1, 2, 2, 2, 2, 2 }; + + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterEngine.cs 
b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterEngine.cs new file mode 100644 index 000000000..f3a39d8e9 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterEngine.cs @@ -0,0 +1,869 @@ +// DeflaterEngine.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. 
If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +using GitHub.ICSharpCode.SharpZipLib.Checksums; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// Strategies for deflater + /// + public enum DeflateStrategy + { + /// + /// The default strategy + /// + Default = 0, + + /// + /// This strategy will only allow longer string repetitions. It is + /// useful for random data with a small character set. + /// + Filtered = 1, + + + /// + /// This strategy will not look for string repetitions at all. It + /// only encodes with Huffman trees (which means, that more common + /// characters get a smaller encoding. + /// + HuffmanOnly = 2 + } + + // DEFLATE ALGORITHM: + // + // The uncompressed stream is inserted into the window array. When + // the window array is full the first half is thrown away and the + // second half is copied to the beginning. + // + // The head array is a hash table. Three characters build a hash value + // and they the value points to the corresponding index in window of + // the last string with this hash. The prev array implements a + // linked list of matches with the same hash: prev[index & WMASK] points + // to the previous index with the same hash. + // + + + /// + /// Low level compression engine for deflate algorithm which uses a 32K sliding window + /// with secondary compression from Huffman/Shannon-Fano codes. 
+ /// + public class DeflaterEngine : DeflaterConstants + { + #region Constants + const int TooFar = 4096; + #endregion + + #region Constructors + /// + /// Construct instance with pending buffer + /// + /// + /// Pending buffer to use + /// > + public DeflaterEngine(DeflaterPending pending) + { + this.pending = pending; + huffman = new DeflaterHuffman(pending); + adler = new Adler32(); + + window = new byte[2 * WSIZE]; + head = new short[HASH_SIZE]; + prev = new short[WSIZE]; + + // We start at index 1, to avoid an implementation deficiency, that + // we cannot build a repeat pattern at index 0. + blockStart = strstart = 1; + } + + #endregion + + /// + /// Deflate drives actual compression of data + /// + /// True to flush input buffers + /// Finish deflation with the current input. + /// Returns true if progress has been made. + public bool Deflate(bool flush, bool finish) + { + bool progress; + do + { + FillWindow(); + bool canFlush = flush && (inputOff == inputEnd); + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) { + Console.WriteLine("window: [" + blockStart + "," + strstart + "," + + lookahead + "], " + compressionFunction + "," + canFlush); + } +#endif + switch (compressionFunction) + { + case DEFLATE_STORED: + progress = DeflateStored(canFlush, finish); + break; + case DEFLATE_FAST: + progress = DeflateFast(canFlush, finish); + break; + case DEFLATE_SLOW: + progress = DeflateSlow(canFlush, finish); + break; + default: + throw new InvalidOperationException("unknown compressionFunction"); + } + } while (pending.IsFlushed && progress); // repeat while we have no pending output and progress was made + return progress; + } + + /// + /// Sets input data to be deflated. Should only be called when NeedsInput() + /// returns true + /// + /// The buffer containing input data. + /// The offset of the first byte of data. + /// The number of bytes of data to use as input. 
+ public void SetInput(byte[] buffer, int offset, int count) + { + if ( buffer == null ) + { + throw new ArgumentNullException("buffer"); + } + + if ( offset < 0 ) + { + throw new ArgumentOutOfRangeException("offset"); + } + + if ( count < 0 ) + { + throw new ArgumentOutOfRangeException("count"); + } + + if (inputOff < inputEnd) + { + throw new InvalidOperationException("Old input was not completely processed"); + } + + int end = offset + count; + + /* We want to throw an ArrayIndexOutOfBoundsException early. The + * check is very tricky: it also handles integer wrap around. + */ + if ((offset > end) || (end > buffer.Length) ) + { + throw new ArgumentOutOfRangeException("count"); + } + + inputBuf = buffer; + inputOff = offset; + inputEnd = end; + } + + /// + /// Determines if more input is needed. + /// + /// Return true if input is needed via SetInput + public bool NeedsInput() + { + return (inputEnd == inputOff); + } + + /// + /// Set compression dictionary + /// + /// The buffer containing the dictionary data + /// The offset in the buffer for the first byte of data + /// The length of the dictionary data. 
+ public void SetDictionary(byte[] buffer, int offset, int length) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (strstart != 1) ) + { + throw new InvalidOperationException("strstart not 1"); + } +#endif + adler.Update(buffer, offset, length); + if (length < MIN_MATCH) + { + return; + } + + if (length > MAX_DIST) + { + offset += length - MAX_DIST; + length = MAX_DIST; + } + + System.Array.Copy(buffer, offset, window, strstart, length); + + UpdateHash(); + --length; + while (--length > 0) + { + InsertString(); + strstart++; + } + strstart += 2; + blockStart = strstart; + } + + /// + /// Reset internal state + /// + public void Reset() + { + huffman.Reset(); + adler.Reset(); + blockStart = strstart = 1; + lookahead = 0; + totalIn = 0; + prevAvailable = false; + matchLen = MIN_MATCH - 1; + + for (int i = 0; i < HASH_SIZE; i++) { + head[i] = 0; + } + + for (int i = 0; i < WSIZE; i++) { + prev[i] = 0; + } + } + + /// + /// Reset Adler checksum + /// + public void ResetAdler() + { + adler.Reset(); + } + + /// + /// Get current value of Adler checksum + /// + public int Adler { + get { + return unchecked((int)adler.Value); + } + } + + /// + /// Total data processed + /// + public long TotalIn { + get { + return totalIn; + } + } + + /// + /// Get/set the deflate strategy + /// + public DeflateStrategy Strategy { + get { + return strategy; + } + set { + strategy = value; + } + } + + /// + /// Set the deflate level (0-9) + /// + /// The value to set the level to. 
+ public void SetLevel(int level) + { + if ( (level < 0) || (level > 9) ) + { + throw new ArgumentOutOfRangeException("level"); + } + + goodLength = DeflaterConstants.GOOD_LENGTH[level]; + max_lazy = DeflaterConstants.MAX_LAZY[level]; + niceLength = DeflaterConstants.NICE_LENGTH[level]; + max_chain = DeflaterConstants.MAX_CHAIN[level]; + + if (DeflaterConstants.COMPR_FUNC[level] != compressionFunction) { + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) { + Console.WriteLine("Change from " + compressionFunction + " to " + + DeflaterConstants.COMPR_FUNC[level]); + } +#endif + switch (compressionFunction) { + case DEFLATE_STORED: + if (strstart > blockStart) { + huffman.FlushStoredBlock(window, blockStart, + strstart - blockStart, false); + blockStart = strstart; + } + UpdateHash(); + break; + + case DEFLATE_FAST: + if (strstart > blockStart) { + huffman.FlushBlock(window, blockStart, strstart - blockStart, + false); + blockStart = strstart; + } + break; + + case DEFLATE_SLOW: + if (prevAvailable) { + huffman.TallyLit(window[strstart-1] & 0xff); + } + if (strstart > blockStart) { + huffman.FlushBlock(window, blockStart, strstart - blockStart, false); + blockStart = strstart; + } + prevAvailable = false; + matchLen = MIN_MATCH - 1; + break; + } + compressionFunction = COMPR_FUNC[level]; + } + } + + /// + /// Fill the window + /// + public void FillWindow() + { + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. 
+ */ + if (strstart >= WSIZE + MAX_DIST) + { + SlideWindow(); + } + + /* If there is not enough lookahead, but still some input left, + * read in the input + */ + while (lookahead < DeflaterConstants.MIN_LOOKAHEAD && inputOff < inputEnd) + { + int more = 2 * WSIZE - lookahead - strstart; + + if (more > inputEnd - inputOff) + { + more = inputEnd - inputOff; + } + + System.Array.Copy(inputBuf, inputOff, window, strstart + lookahead, more); + adler.Update(inputBuf, inputOff, more); + + inputOff += more; + totalIn += more; + lookahead += more; + } + + if (lookahead >= MIN_MATCH) + { + UpdateHash(); + } + } + + void UpdateHash() + { +/* + if (DEBUGGING) { + Console.WriteLine("updateHash: "+strstart); + } +*/ + ins_h = (window[strstart] << HASH_SHIFT) ^ window[strstart + 1]; + } + + /// + /// Inserts the current string in the head hash and returns the previous + /// value for this hash. + /// + /// The previous hash value + int InsertString() + { + short match; + int hash = ((ins_h << HASH_SHIFT) ^ window[strstart + (MIN_MATCH -1)]) & HASH_MASK; + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) + { + if (hash != (((window[strstart] << (2*HASH_SHIFT)) ^ + (window[strstart + 1] << HASH_SHIFT) ^ + (window[strstart + 2])) & HASH_MASK)) { + throw new SharpZipBaseException("hash inconsistent: " + hash + "/" + +window[strstart] + "," + +window[strstart + 1] + "," + +window[strstart + 2] + "," + HASH_SHIFT); + } + } +#endif + prev[strstart & WMASK] = match = head[hash]; + head[hash] = unchecked((short)strstart); + ins_h = hash; + return match & 0xffff; + } + + void SlideWindow() + { + Array.Copy(window, WSIZE, window, 0, WSIZE); + matchStart -= WSIZE; + strstart -= WSIZE; + blockStart -= WSIZE; + + // Slide the hash table (could be avoided with 32 bit values + // at the expense of memory usage). + for (int i = 0; i < HASH_SIZE; ++i) { + int m = head[i] & 0xffff; + head[i] = (short)(m >= WSIZE ? (m - WSIZE) : 0); + } + + // Slide the prev table. 
+ for (int i = 0; i < WSIZE; i++) { + int m = prev[i] & 0xffff; + prev[i] = (short)(m >= WSIZE ? (m - WSIZE) : 0); + } + } + + /// + /// Find the best (longest) string in the window matching the + /// string starting at strstart. + /// + /// Preconditions: + /// + /// strstart + MAX_MATCH <= window.length. + /// + /// + /// True if a match greater than the minimum length is found + bool FindLongestMatch(int curMatch) + { + int chainLength = this.max_chain; + int niceLength = this.niceLength; + short[] prev = this.prev; + int scan = this.strstart; + int match; + int best_end = this.strstart + matchLen; + int best_len = Math.Max(matchLen, MIN_MATCH - 1); + + int limit = Math.Max(strstart - MAX_DIST, 0); + + int strend = strstart + MAX_MATCH - 1; + byte scan_end1 = window[best_end - 1]; + byte scan_end = window[best_end]; + + // Do not waste too much time if we already have a good match: + if (best_len >= this.goodLength) { + chainLength >>= 2; + } + + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (niceLength > lookahead) { + niceLength = lookahead; + } + +#if DebugDeflation + + if (DeflaterConstants.DEBUGGING && (strstart > 2 * WSIZE - MIN_LOOKAHEAD)) + { + throw new InvalidOperationException("need lookahead"); + } +#endif + + do { + +#if DebugDeflation + + if (DeflaterConstants.DEBUGGING && (curMatch >= strstart) ) + { + throw new InvalidOperationException("no future"); + } +#endif + if (window[curMatch + best_len] != scan_end || + window[curMatch + best_len - 1] != scan_end1 || + window[curMatch] != window[scan] || + window[curMatch + 1] != window[scan + 1]) { + continue; + } + + match = curMatch + 2; + scan += 2; + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart + 258. 
+ */ + while ( + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + window[++scan] == window[++match] && + (scan < strend)) + { + // Do nothing + } + + if (scan > best_end) { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (ins_h == 0) ) + Console.Error.WriteLine("Found match: " + curMatch + "-" + (scan - strstart)); +#endif + matchStart = curMatch; + best_end = scan; + best_len = scan - strstart; + + if (best_len >= niceLength) { + break; + } + + scan_end1 = window[best_end - 1]; + scan_end = window[best_end]; + } + scan = strstart; + } while ((curMatch = (prev[curMatch & WMASK] & 0xffff)) > limit && --chainLength != 0); + + matchLen = Math.Min(best_len, lookahead); + return matchLen >= MIN_MATCH; + } + + bool DeflateStored(bool flush, bool finish) + { + if (!flush && (lookahead == 0)) { + return false; + } + + strstart += lookahead; + lookahead = 0; + + int storedLength = strstart - blockStart; + + if ((storedLength >= DeflaterConstants.MAX_BLOCK_SIZE) || // Block is full + (blockStart < WSIZE && storedLength >= MAX_DIST) || // Block may move out of window + flush) { + bool lastBlock = finish; + if (storedLength > DeflaterConstants.MAX_BLOCK_SIZE) { + storedLength = DeflaterConstants.MAX_BLOCK_SIZE; + lastBlock = false; + } + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) + { + Console.WriteLine("storedBlock[" + storedLength + "," + lastBlock + "]"); + } +#endif + + huffman.FlushStoredBlock(window, blockStart, storedLength, lastBlock); + blockStart += storedLength; + return !lastBlock; + } + return true; + } + + bool DeflateFast(bool flush, bool finish) + { + if (lookahead < MIN_LOOKAHEAD && !flush) { + return false; + } + + while (lookahead >= MIN_LOOKAHEAD || flush) { + if (lookahead == 0) { + // We are flushing everything + 
huffman.FlushBlock(window, blockStart, strstart - blockStart, finish); + blockStart = strstart; + return false; + } + + if (strstart > 2 * WSIZE - MIN_LOOKAHEAD) { + /* slide window, as FindLongestMatch needs this. + * This should only happen when flushing and the window + * is almost full. + */ + SlideWindow(); + } + + int hashHead; + if (lookahead >= MIN_MATCH && + (hashHead = InsertString()) != 0 && + strategy != DeflateStrategy.HuffmanOnly && + strstart - hashHead <= MAX_DIST && + FindLongestMatch(hashHead)) { + // longestMatch sets matchStart and matchLen +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) + { + for (int i = 0 ; i < matchLen; i++) { + if (window[strstart + i] != window[matchStart + i]) { + throw new SharpZipBaseException("Match failure"); + } + } + } +#endif + + bool full = huffman.TallyDist(strstart - matchStart, matchLen); + + lookahead -= matchLen; + if (matchLen <= max_lazy && lookahead >= MIN_MATCH) { + while (--matchLen > 0) { + ++strstart; + InsertString(); + } + ++strstart; + } else { + strstart += matchLen; + if (lookahead >= MIN_MATCH - 1) { + UpdateHash(); + } + } + matchLen = MIN_MATCH - 1; + if (!full) { + continue; + } + } else { + // No match found + huffman.TallyLit(window[strstart] & 0xff); + ++strstart; + --lookahead; + } + + if (huffman.IsFull()) { + bool lastBlock = finish && (lookahead == 0); + huffman.FlushBlock(window, blockStart, strstart - blockStart, lastBlock); + blockStart = strstart; + return !lastBlock; + } + } + return true; + } + + bool DeflateSlow(bool flush, bool finish) + { + if (lookahead < MIN_LOOKAHEAD && !flush) { + return false; + } + + while (lookahead >= MIN_LOOKAHEAD || flush) { + if (lookahead == 0) { + if (prevAvailable) { + huffman.TallyLit(window[strstart-1] & 0xff); + } + prevAvailable = false; + + // We are flushing everything +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && !flush) + { + throw new SharpZipBaseException("Not flushing, but no lookahead"); + } +#endif + 
huffman.FlushBlock(window, blockStart, strstart - blockStart, + finish); + blockStart = strstart; + return false; + } + + if (strstart >= 2 * WSIZE - MIN_LOOKAHEAD) { + /* slide window, as FindLongestMatch needs this. + * This should only happen when flushing and the window + * is almost full. + */ + SlideWindow(); + } + + int prevMatch = matchStart; + int prevLen = matchLen; + if (lookahead >= MIN_MATCH) { + + int hashHead = InsertString(); + + if (strategy != DeflateStrategy.HuffmanOnly && + hashHead != 0 && + strstart - hashHead <= MAX_DIST && + FindLongestMatch(hashHead)) { + + // longestMatch sets matchStart and matchLen + + // Discard match if too small and too far away + if (matchLen <= 5 && (strategy == DeflateStrategy.Filtered || (matchLen == MIN_MATCH && strstart - matchStart > TooFar))) { + matchLen = MIN_MATCH - 1; + } + } + } + + // previous match was better + if ((prevLen >= MIN_MATCH) && (matchLen <= prevLen) ) { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) + { + for (int i = 0 ; i < matchLen; i++) { + if (window[strstart-1+i] != window[prevMatch + i]) + throw new SharpZipBaseException(); + } + } +#endif + huffman.TallyDist(strstart - 1 - prevMatch, prevLen); + prevLen -= 2; + do { + strstart++; + lookahead--; + if (lookahead >= MIN_MATCH) { + InsertString(); + } + } while (--prevLen > 0); + + strstart ++; + lookahead--; + prevAvailable = false; + matchLen = MIN_MATCH - 1; + } else { + if (prevAvailable) { + huffman.TallyLit(window[strstart-1] & 0xff); + } + prevAvailable = true; + strstart++; + lookahead--; + } + + if (huffman.IsFull()) { + int len = strstart - blockStart; + if (prevAvailable) { + len--; + } + bool lastBlock = (finish && (lookahead == 0) && !prevAvailable); + huffman.FlushBlock(window, blockStart, len, lastBlock); + blockStart += len; + return !lastBlock; + } + } + return true; + } + + #region Instance Fields + + // Hash index of string to be inserted + int ins_h; + + /// + /// Hashtable, hashing three characters to an 
index for window, so + /// that window[index]..window[index+2] have this hash code. + /// Note that the array should really be unsigned short, so you need + /// to and the values with 0xffff. + /// + short[] head; + + /// + /// prev[index & WMASK] points to the previous index that has the + /// same hash code as the string starting at index. This way + /// entries with the same hash code are in a linked list. + /// Note that the array should really be unsigned short, so you need + /// to and the values with 0xffff. + /// + short[] prev; + + int matchStart; + // Length of best match + int matchLen; + // Set if previous match exists + bool prevAvailable; + int blockStart; + + /// + /// Points to the current character in the window. + /// + int strstart; + + /// + /// lookahead is the number of characters starting at strstart in + /// window that are valid. + /// So window[strstart] until window[strstart+lookahead-1] are valid + /// characters. + /// + int lookahead; + + /// + /// This array contains the part of the uncompressed stream that + /// is of relevance. The current character is indexed by strstart. + /// + byte[] window; + + DeflateStrategy strategy; + int max_chain, max_lazy, niceLength, goodLength; + + /// + /// The current compression function. + /// + int compressionFunction; + + /// + /// The input data for compression. + /// + byte[] inputBuf; + + /// + /// The total bytes of input read. + /// + long totalIn; + + /// + /// The offset into inputBuf, where input data starts. + /// + int inputOff; + + /// + /// The end offset of the input data. 
+ /// + int inputEnd; + + DeflaterPending pending; + DeflaterHuffman huffman; + + /// + /// The adler checksum + /// + Adler32 adler; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterHuffman.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterHuffman.cs new file mode 100644 index 000000000..b7251e95f --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterHuffman.cs @@ -0,0 +1,908 @@ +// DeflaterHuffman.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// This is the DeflaterHuffman class. + /// + /// This class is not thread safe. This is inherent in the API, due + /// to the split of Deflate and SetInput. + /// + /// author of the original java version : Jochen Hoenicke + /// + public class DeflaterHuffman + { + const int BUFSIZE = 1 << (DeflaterConstants.DEFAULT_MEM_LEVEL + 6); + const int LITERAL_NUM = 286; + + // Number of distance codes + const int DIST_NUM = 30; + // Number of codes used to transfer bit lengths + const int BITLEN_NUM = 19; + + // repeat previous bit length 3-6 times (2 bits of repeat count) + const int REP_3_6 = 16; + // repeat a zero length 3-10 times (3 bits of repeat count) + const int REP_3_10 = 17; + // repeat a zero length 11-138 times (7 bits of repeat count) + const int REP_11_138 = 18; + + const int EOF_SYMBOL = 256; + + // The lengths of the bit length codes are sent in order of decreasing + // probability, to avoid transmitting the lengths for unused bit length codes. 
+ static readonly int[] BL_ORDER = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 }; + + static readonly byte[] bit4Reverse = { + 0, + 8, + 4, + 12, + 2, + 10, + 6, + 14, + 1, + 9, + 5, + 13, + 3, + 11, + 7, + 15 + }; + + static short[] staticLCodes; + static byte[] staticLLength; + static short[] staticDCodes; + static byte[] staticDLength; + + class Tree + { + #region Instance Fields + public short[] freqs; + + public byte[] length; + + public int minNumCodes; + + public int numCodes; + + short[] codes; + int[] bl_counts; + int maxLength; + DeflaterHuffman dh; + #endregion + + #region Constructors + public Tree(DeflaterHuffman dh, int elems, int minCodes, int maxLength) + { + this.dh = dh; + this.minNumCodes = minCodes; + this.maxLength = maxLength; + freqs = new short[elems]; + bl_counts = new int[maxLength]; + } + + #endregion + + /// + /// Resets the internal state of the tree + /// + public void Reset() + { + for (int i = 0; i < freqs.Length; i++) { + freqs[i] = 0; + } + codes = null; + length = null; + } + + public void WriteSymbol(int code) + { + // if (DeflaterConstants.DEBUGGING) { + // freqs[code]--; + // // Console.Write("writeSymbol("+freqs.length+","+code+"): "); + // } + dh.pending.WriteBits(codes[code] & 0xffff, length[code]); + } + + /// + /// Check that all frequencies are zero + /// + /// + /// At least one frequency is non-zero + /// + public void CheckEmpty() + { + bool empty = true; + for (int i = 0; i < freqs.Length; i++) { + if (freqs[i] != 0) { + //Console.WriteLine("freqs[" + i + "] == " + freqs[i]); + empty = false; + } + } + + if (!empty) { + throw new SharpZipBaseException("!Empty"); + } + } + + /// + /// Set static codes and length + /// + /// new codes + /// length for new codes + public void SetStaticCodes(short[] staticCodes, byte[] staticLengths) + { + codes = staticCodes; + length = staticLengths; + } + + /// + /// Build dynamic codes and lengths + /// + public void BuildCodes() + { + int numSymbols = 
freqs.Length; + int[] nextCode = new int[maxLength]; + int code = 0; + + codes = new short[freqs.Length]; + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("buildCodes: "+freqs.Length); + // } + + for (int bits = 0; bits < maxLength; bits++) { + nextCode[bits] = code; + code += bl_counts[bits] << (15 - bits); + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("bits: " + ( bits + 1) + " count: " + bl_counts[bits] + // +" nextCode: "+code); + // } + } + +#if DebugDeflation + if ( DeflaterConstants.DEBUGGING && (code != 65536) ) + { + throw new SharpZipBaseException("Inconsistent bl_counts!"); + } +#endif + for (int i=0; i < numCodes; i++) { + int bits = length[i]; + if (bits > 0) { + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("codes["+i+"] = rev(" + nextCode[bits-1]+"), + // +bits); + // } + + codes[i] = BitReverse(nextCode[bits-1]); + nextCode[bits-1] += 1 << (16 - bits); + } + } + } + + public void BuildTree() + { + int numSymbols = freqs.Length; + + /* heap is a priority queue, sorted by frequency, least frequent + * nodes first. The heap is a binary tree, with the property, that + * the parent node is smaller than both child nodes. This assures + * that the smallest node is the first parent. + * + * The binary tree is encoded in an array: 0 is root node and + * the nodes 2*n+1, 2*n+2 are the child nodes of node n. + */ + int[] heap = new int[numSymbols]; + int heapLen = 0; + int maxCode = 0; + for (int n = 0; n < numSymbols; n++) { + int freq = freqs[n]; + if (freq != 0) { + // Insert n into heap + int pos = heapLen++; + int ppos; + while (pos > 0 && freqs[heap[ppos = (pos - 1) / 2]] > freq) { + heap[pos] = heap[ppos]; + pos = ppos; + } + heap[pos] = n; + + maxCode = n; + } + } + + /* We could encode a single literal with 0 bits but then we + * don't see the literals. Therefore we force at least two + * literals to avoid this case. We don't care about order in + * this case, both literals get a 1 bit code. 
+ */ + while (heapLen < 2) { + int node = maxCode < 2 ? ++maxCode : 0; + heap[heapLen++] = node; + } + + numCodes = Math.Max(maxCode + 1, minNumCodes); + + int numLeafs = heapLen; + int[] childs = new int[4 * heapLen - 2]; + int[] values = new int[2 * heapLen - 1]; + int numNodes = numLeafs; + for (int i = 0; i < heapLen; i++) { + int node = heap[i]; + childs[2 * i] = node; + childs[2 * i + 1] = -1; + values[i] = freqs[node] << 8; + heap[i] = i; + } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + do { + int first = heap[0]; + int last = heap[--heapLen]; + + // Propagate the hole to the leafs of the heap + int ppos = 0; + int path = 1; + + while (path < heapLen) { + if (path + 1 < heapLen && values[heap[path]] > values[heap[path+1]]) { + path++; + } + + heap[ppos] = heap[path]; + ppos = path; + path = path * 2 + 1; + } + + /* Now propagate the last element down along path. Normally + * it shouldn't go too deep. + */ + int lastVal = values[last]; + while ((path = ppos) > 0 && values[heap[ppos = (path - 1)/2]] > lastVal) { + heap[path] = heap[ppos]; + } + heap[path] = last; + + + int second = heap[0]; + + // Create a new node father of first and second + last = numNodes++; + childs[2 * last] = first; + childs[2 * last + 1] = second; + int mindepth = Math.Min(values[first] & 0xff, values[second] & 0xff); + values[last] = lastVal = values[first] + values[second] - mindepth + 1; + + // Again, propagate the hole to the leafs + ppos = 0; + path = 1; + + while (path < heapLen) { + if (path + 1 < heapLen && values[heap[path]] > values[heap[path+1]]) { + path++; + } + + heap[ppos] = heap[path]; + ppos = path; + path = ppos * 2 + 1; + } + + // Now propagate the new element down along path + while ((path = ppos) > 0 && values[heap[ppos = (path - 1)/2]] > lastVal) { + heap[path] = heap[ppos]; + } + heap[path] = last; + } while (heapLen > 1); + + if (heap[0] != childs.Length / 2 - 1) { + throw new SharpZipBaseException("Heap 
invariant violated"); + } + + BuildLength(childs); + } + + /// + /// Get encoded length + /// + /// Encoded length, the sum of frequencies * lengths + public int GetEncodedLength() + { + int len = 0; + for (int i = 0; i < freqs.Length; i++) { + len += freqs[i] * length[i]; + } + return len; + } + + /// + /// Scan a literal or distance tree to determine the frequencies of the codes + /// in the bit length tree. + /// + public void CalcBLFreq(Tree blTree) + { + int max_count; /* max repeat count */ + int min_count; /* min repeat count */ + int count; /* repeat count of the current code */ + int curlen = -1; /* length of current code */ + + int i = 0; + while (i < numCodes) { + count = 1; + int nextlen = length[i]; + if (nextlen == 0) { + max_count = 138; + min_count = 3; + } else { + max_count = 6; + min_count = 3; + if (curlen != nextlen) { + blTree.freqs[nextlen]++; + count = 0; + } + } + curlen = nextlen; + i++; + + while (i < numCodes && curlen == length[i]) { + i++; + if (++count >= max_count) { + break; + } + } + + if (count < min_count) { + blTree.freqs[curlen] += (short)count; + } else if (curlen != 0) { + blTree.freqs[REP_3_6]++; + } else if (count <= 10) { + blTree.freqs[REP_3_10]++; + } else { + blTree.freqs[REP_11_138]++; + } + } + } + + /// + /// Write tree values + /// + /// Tree to write + public void WriteTree(Tree blTree) + { + int max_count; // max repeat count + int min_count; // min repeat count + int count; // repeat count of the current code + int curlen = -1; // length of current code + + int i = 0; + while (i < numCodes) { + count = 1; + int nextlen = length[i]; + if (nextlen == 0) { + max_count = 138; + min_count = 3; + } else { + max_count = 6; + min_count = 3; + if (curlen != nextlen) { + blTree.WriteSymbol(nextlen); + count = 0; + } + } + curlen = nextlen; + i++; + + while (i < numCodes && curlen == length[i]) { + i++; + if (++count >= max_count) { + break; + } + } + + if (count < min_count) { + while (count-- > 0) { + 
blTree.WriteSymbol(curlen); + } + } else if (curlen != 0) { + blTree.WriteSymbol(REP_3_6); + dh.pending.WriteBits(count - 3, 2); + } else if (count <= 10) { + blTree.WriteSymbol(REP_3_10); + dh.pending.WriteBits(count - 3, 3); + } else { + blTree.WriteSymbol(REP_11_138); + dh.pending.WriteBits(count - 11, 7); + } + } + } + + void BuildLength(int[] childs) + { + this.length = new byte [freqs.Length]; + int numNodes = childs.Length / 2; + int numLeafs = (numNodes + 1) / 2; + int overflow = 0; + + for (int i = 0; i < maxLength; i++) { + bl_counts[i] = 0; + } + + // First calculate optimal bit lengths + int[] lengths = new int[numNodes]; + lengths[numNodes-1] = 0; + + for (int i = numNodes - 1; i >= 0; i--) { + if (childs[2 * i + 1] != -1) { + int bitLength = lengths[i] + 1; + if (bitLength > maxLength) { + bitLength = maxLength; + overflow++; + } + lengths[childs[2 * i]] = lengths[childs[2 * i + 1]] = bitLength; + } else { + // A leaf node + int bitLength = lengths[i]; + bl_counts[bitLength - 1]++; + this.length[childs[2*i]] = (byte) lengths[i]; + } + } + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("Tree "+freqs.Length+" lengths:"); + // for (int i=0; i < numLeafs; i++) { + // //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]] + // + " len: "+length[childs[2*i]]); + // } + // } + + if (overflow == 0) { + return; + } + + int incrBitLen = maxLength - 1; + do { + // Find the first bit length which could increase: + while (bl_counts[--incrBitLen] == 0) + ; + + // Move this node one down and remove a corresponding + // number of overflow nodes. + do { + bl_counts[incrBitLen]--; + bl_counts[++incrBitLen]++; + overflow -= 1 << (maxLength - 1 - incrBitLen); + } while (overflow > 0 && incrBitLen < maxLength - 1); + } while (overflow > 0); + + /* We may have overshot above. Move some nodes from maxLength to + * maxLength-1 in that case. 
+ */ + bl_counts[maxLength-1] += overflow; + bl_counts[maxLength-2] -= overflow; + + /* Now recompute all bit lengths, scanning in increasing + * frequency. It is simpler to reconstruct all lengths instead of + * fixing only the wrong ones. This idea is taken from 'ar' + * written by Haruhiko Okumura. + * + * The nodes were inserted with decreasing frequency into the childs + * array. + */ + int nodePtr = 2 * numLeafs; + for (int bits = maxLength; bits != 0; bits--) { + int n = bl_counts[bits-1]; + while (n > 0) { + int childPtr = 2*childs[nodePtr++]; + if (childs[childPtr + 1] == -1) { + // We found another leaf + length[childs[childPtr]] = (byte) bits; + n--; + } + } + } + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("*** After overflow elimination. ***"); + // for (int i=0; i < numLeafs; i++) { + // //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]] + // + " len: "+length[childs[2*i]]); + // } + // } + } + + } + + #region Instance Fields + /// + /// Pending buffer to use + /// + public DeflaterPending pending; + + Tree literalTree; + Tree distTree; + Tree blTree; + + // Buffer for distances + short[] d_buf; + byte[] l_buf; + int last_lit; + int extra_bits; + #endregion + + static DeflaterHuffman() + { + // See RFC 1951 3.2.6 + // Literal codes + staticLCodes = new short[LITERAL_NUM]; + staticLLength = new byte[LITERAL_NUM]; + + int i = 0; + while (i < 144) { + staticLCodes[i] = BitReverse((0x030 + i) << 8); + staticLLength[i++] = 8; + } + + while (i < 256) { + staticLCodes[i] = BitReverse((0x190 - 144 + i) << 7); + staticLLength[i++] = 9; + } + + while (i < 280) { + staticLCodes[i] = BitReverse((0x000 - 256 + i) << 9); + staticLLength[i++] = 7; + } + + while (i < LITERAL_NUM) { + staticLCodes[i] = BitReverse((0x0c0 - 280 + i) << 8); + staticLLength[i++] = 8; + } + + // Distance codes + staticDCodes = new short[DIST_NUM]; + staticDLength = new byte[DIST_NUM]; + for (i = 0; i < DIST_NUM; i++) { + staticDCodes[i] = BitReverse(i 
<< 11); + staticDLength[i] = 5; + } + } + + /// + /// Construct instance with pending buffer + /// + /// Pending buffer to use + public DeflaterHuffman(DeflaterPending pending) + { + this.pending = pending; + + literalTree = new Tree(this, LITERAL_NUM, 257, 15); + distTree = new Tree(this, DIST_NUM, 1, 15); + blTree = new Tree(this, BITLEN_NUM, 4, 7); + + d_buf = new short[BUFSIZE]; + l_buf = new byte [BUFSIZE]; + } + + /// + /// Reset internal state + /// + public void Reset() + { + last_lit = 0; + extra_bits = 0; + literalTree.Reset(); + distTree.Reset(); + blTree.Reset(); + } + + /// + /// Write all trees to pending buffer + /// + /// The number/rank of treecodes to send. + public void SendAllTrees(int blTreeCodes) + { + blTree.BuildCodes(); + literalTree.BuildCodes(); + distTree.BuildCodes(); + pending.WriteBits(literalTree.numCodes - 257, 5); + pending.WriteBits(distTree.numCodes - 1, 5); + pending.WriteBits(blTreeCodes - 4, 4); + for (int rank = 0; rank < blTreeCodes; rank++) { + pending.WriteBits(blTree.length[BL_ORDER[rank]], 3); + } + literalTree.WriteTree(blTree); + distTree.WriteTree(blTree); + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) { + blTree.CheckEmpty(); + } +#endif + } + + /// + /// Compress current buffer writing data to pending buffer + /// + public void CompressBlock() + { + for (int i = 0; i < last_lit; i++) { + int litlen = l_buf[i] & 0xff; + int dist = d_buf[i]; + if (dist-- != 0) { + // if (DeflaterConstants.DEBUGGING) { + // Console.Write("["+(dist+1)+","+(litlen+3)+"]: "); + // } + + int lc = Lcode(litlen); + literalTree.WriteSymbol(lc); + + int bits = (lc - 261) / 4; + if (bits > 0 && bits <= 5) { + pending.WriteBits(litlen & ((1 << bits) - 1), bits); + } + + int dc = Dcode(dist); + distTree.WriteSymbol(dc); + + bits = dc / 2 - 1; + if (bits > 0) { + pending.WriteBits(dist & ((1 << bits) - 1), bits); + } + } else { + // if (DeflaterConstants.DEBUGGING) { + // if (litlen > 32 && litlen < 127) { + // 
Console.Write("("+(char)litlen+"): "); + // } else { + // Console.Write("{"+litlen+"}: "); + // } + // } + literalTree.WriteSymbol(litlen); + } + } + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) { + Console.Write("EOF: "); + } +#endif + literalTree.WriteSymbol(EOF_SYMBOL); + +#if DebugDeflation + if (DeflaterConstants.DEBUGGING) { + literalTree.CheckEmpty(); + distTree.CheckEmpty(); + } +#endif + } + + /// + /// Flush block to output with no compression + /// + /// Data to write + /// Index of first byte to write + /// Count of bytes to write + /// True if this is the last block + public void FlushStoredBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock) + { +#if DebugDeflation + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("Flushing stored block "+ storedLength); + // } +#endif + pending.WriteBits((DeflaterConstants.STORED_BLOCK << 1) + (lastBlock ? 1 : 0), 3); + pending.AlignToByte(); + pending.WriteShort(storedLength); + pending.WriteShort(~storedLength); + pending.WriteBlock(stored, storedOffset, storedLength); + Reset(); + } + + /// + /// Flush block to output with compression + /// + /// Data to flush + /// Index of first byte to flush + /// Count of bytes to flush + /// True if this is the last block + public void FlushBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock) + { + literalTree.freqs[EOF_SYMBOL]++; + + // Build trees + literalTree.BuildTree(); + distTree.BuildTree(); + + // Calculate bitlen frequency + literalTree.CalcBLFreq(blTree); + distTree.CalcBLFreq(blTree); + + // Build bitlen tree + blTree.BuildTree(); + + int blTreeCodes = 4; + for (int i = 18; i > blTreeCodes; i--) { + if (blTree.length[BL_ORDER[i]] > 0) { + blTreeCodes = i+1; + } + } + int opt_len = 14 + blTreeCodes * 3 + blTree.GetEncodedLength() + + literalTree.GetEncodedLength() + distTree.GetEncodedLength() + + extra_bits; + + int static_len = extra_bits; + for (int i = 0; i < LITERAL_NUM; i++) { + static_len += 
literalTree.freqs[i] * staticLLength[i]; + } + for (int i = 0; i < DIST_NUM; i++) { + static_len += distTree.freqs[i] * staticDLength[i]; + } + if (opt_len >= static_len) { + // Force static trees + opt_len = static_len; + } + + if (storedOffset >= 0 && storedLength + 4 < opt_len >> 3) { + // Store Block + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("Storing, since " + storedLength + " < " + opt_len + // + " <= " + static_len); + // } + FlushStoredBlock(stored, storedOffset, storedLength, lastBlock); + } else if (opt_len == static_len) { + // Encode with static tree + pending.WriteBits((DeflaterConstants.STATIC_TREES << 1) + (lastBlock ? 1 : 0), 3); + literalTree.SetStaticCodes(staticLCodes, staticLLength); + distTree.SetStaticCodes(staticDCodes, staticDLength); + CompressBlock(); + Reset(); + } else { + // Encode with dynamic tree + pending.WriteBits((DeflaterConstants.DYN_TREES << 1) + (lastBlock ? 1 : 0), 3); + SendAllTrees(blTreeCodes); + CompressBlock(); + Reset(); + } + } + + /// + /// Get value indicating if internal buffer is full + /// + /// true if buffer is full + public bool IsFull() + { + return last_lit >= BUFSIZE; + } + + /// + /// Add literal to buffer + /// + /// Literal value to add to buffer. 
+ /// Value indicating internal buffer is full + public bool TallyLit(int literal) + { + // if (DeflaterConstants.DEBUGGING) { + // if (lit > 32 && lit < 127) { + // //Console.WriteLine("("+(char)lit+")"); + // } else { + // //Console.WriteLine("{"+lit+"}"); + // } + // } + d_buf[last_lit] = 0; + l_buf[last_lit++] = (byte)literal; + literalTree.freqs[literal]++; + return IsFull(); + } + + /// + /// Add distance code and length to literal and distance trees + /// + /// Distance code + /// Length + /// Value indicating if internal buffer is full + public bool TallyDist(int distance, int length) + { + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("[" + distance + "," + length + "]"); + // } + + d_buf[last_lit] = (short)distance; + l_buf[last_lit++] = (byte)(length - 3); + + int lc = Lcode(length - 3); + literalTree.freqs[lc]++; + if (lc >= 265 && lc < 285) { + extra_bits += (lc - 261) / 4; + } + + int dc = Dcode(distance - 1); + distTree.freqs[dc]++; + if (dc >= 4) { + extra_bits += dc / 2 - 1; + } + return IsFull(); + } + + + /// + /// Reverse the bits of a 16 bit value. 
+ /// + /// Value to reverse bits + /// Value with bits reversed + public static short BitReverse(int toReverse) + { + return (short) (bit4Reverse[toReverse & 0xF] << 12 | + bit4Reverse[(toReverse >> 4) & 0xF] << 8 | + bit4Reverse[(toReverse >> 8) & 0xF] << 4 | + bit4Reverse[toReverse >> 12]); + } + + static int Lcode(int length) + { + if (length == 255) { + return 285; + } + + int code = 257; + while (length >= 8) { + code += 4; + length >>= 1; + } + return code + length; + } + + static int Dcode(int distance) + { + int code = 0; + while (distance >= 4) { + code += 2; + distance >>= 1; + } + return code + distance; + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterPending.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterPending.cs new file mode 100644 index 000000000..dbdcac6b2 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/DeflaterPending.cs @@ -0,0 +1,57 @@ +// DeflaterPending.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
+// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// This class stores the pending output of the Deflater. + /// + /// author of the original java version : Jochen Hoenicke + /// + public class DeflaterPending : PendingBuffer + { + /// + /// Construct instance with default buffer size + /// + public DeflaterPending() : base(DeflaterConstants.PENDING_BUF_SIZE) + { + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Inflater.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Inflater.cs new file mode 100644 index 000000000..80f98cbaa --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Inflater.cs @@ -0,0 +1,864 @@ +// Inflater.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. 
+// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. 
+ +using System; + +using GitHub.ICSharpCode.SharpZipLib.Checksums; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + /// + /// Inflater is used to decompress data that has been compressed according + /// to the "deflate" standard described in rfc1951. + /// + /// By default Zlib (rfc1950) headers and footers are expected in the input. + /// You can use constructor public Inflater(bool noHeader) passing true + /// if there is no Zlib header information + /// + /// The usage is as following. First you have to set some input with + /// SetInput(), then Inflate() it. If inflate doesn't + /// inflate any bytes there may be three reasons: + ///
    + ///
  • IsNeedingInput() returns true because the input buffer is empty. + /// You have to provide more input with SetInput(). + /// NOTE: IsNeedingInput() also returns true when, the stream is finished. + ///
  • + ///
  • IsNeedingDictionary() returns true, you have to provide a preset + /// dictionary with SetDictionary().
  • + ///
  • IsFinished returns true, the inflater has finished.
  • + ///
+ /// Once the first output byte is produced, a dictionary will not be + /// needed at a later stage. + /// + /// author of the original java version : John Leuner, Jochen Hoenicke + ///
+ public class Inflater + { + #region Constants/Readonly + /// + /// Copy lengths for literal codes 257..285 + /// + static readonly int[] CPLENS = { + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258 + }; + + /// + /// Extra bits for literal codes 257..285 + /// + static readonly int[] CPLEXT = { + 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, + 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0 + }; + + /// + /// Copy offsets for distance codes 0..29 + /// + static readonly int[] CPDIST = { + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577 + }; + + /// + /// Extra bits for distance codes + /// + static readonly int[] CPDEXT = { + 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, + 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, + 12, 12, 13, 13 + }; + + /// + /// These are the possible states for an inflater + /// + const int DECODE_HEADER = 0; + const int DECODE_DICT = 1; + const int DECODE_BLOCKS = 2; + const int DECODE_STORED_LEN1 = 3; + const int DECODE_STORED_LEN2 = 4; + const int DECODE_STORED = 5; + const int DECODE_DYN_HEADER = 6; + const int DECODE_HUFFMAN = 7; + const int DECODE_HUFFMAN_LENBITS = 8; + const int DECODE_HUFFMAN_DIST = 9; + const int DECODE_HUFFMAN_DISTBITS = 10; + const int DECODE_CHKSUM = 11; + const int FINISHED = 12; + #endregion + + #region Instance Fields + /// + /// This variable contains the current state. + /// + int mode; + + /// + /// The adler checksum of the dictionary or of the decompressed + /// stream, as it is written in the header resp. footer of the + /// compressed stream. + /// Only valid if mode is DECODE_DICT or DECODE_CHKSUM. + /// + int readAdler; + + /// + /// The number of bits needed to complete the current state. This + /// is valid, if mode is DECODE_DICT, DECODE_CHKSUM, + /// DECODE_HUFFMAN_LENBITS or DECODE_HUFFMAN_DISTBITS. 
+ /// + int neededBits; + int repLength; + int repDist; + int uncomprLen; + + /// + /// True, if the last block flag was set in the last block of the + /// inflated stream. This means that the stream ends after the + /// current block. + /// + bool isLastBlock; + + /// + /// The total number of inflated bytes. + /// + long totalOut; + + /// + /// The total number of bytes set with setInput(). This is not the + /// value returned by the TotalIn property, since this also includes the + /// unprocessed input. + /// + long totalIn; + + /// + /// This variable stores the noHeader flag that was given to the constructor. + /// True means, that the inflated stream doesn't contain a Zlib header or + /// footer. + /// + bool noHeader; + + StreamManipulator input; + OutputWindow outputWindow; + InflaterDynHeader dynHeader; + InflaterHuffmanTree litlenTree, distTree; + Adler32 adler; + #endregion + + #region Constructors + /// + /// Creates a new inflater or RFC1951 decompressor + /// RFC1950/Zlib headers and footers will be expected in the input data + /// + public Inflater() : this(false) + { + } + + /// + /// Creates a new inflater. + /// + /// + /// True if no RFC1950/Zlib header and footer fields are expected in the input data + /// + /// This is used for GZIPed/Zipped input. + /// + /// For compatibility with + /// Sun JDK you should provide one byte of input more than needed in + /// this case. + /// + public Inflater(bool noHeader) + { + this.noHeader = noHeader; + this.adler = new Adler32(); + input = new StreamManipulator(); + outputWindow = new OutputWindow(); + mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER; + } + #endregion + + /// + /// Resets the inflater so that a new stream can be decompressed. All + /// pending input and output will be discarded. + /// + public void Reset() + { + mode = noHeader ? 
DECODE_BLOCKS : DECODE_HEADER; + totalIn = 0; + totalOut = 0; + input.Reset(); + outputWindow.Reset(); + dynHeader = null; + litlenTree = null; + distTree = null; + isLastBlock = false; + adler.Reset(); + } + + /// + /// Decodes a zlib/RFC1950 header. + /// + /// + /// False if more input is needed. + /// + /// + /// The header is invalid. + /// + private bool DecodeHeader() + { + int header = input.PeekBits(16); + if (header < 0) { + return false; + } + input.DropBits(16); + + // The header is written in "wrong" byte order + header = ((header << 8) | (header >> 8)) & 0xffff; + if (header % 31 != 0) { + throw new SharpZipBaseException("Header checksum illegal"); + } + + if ((header & 0x0f00) != (Deflater.DEFLATED << 8)) { + throw new SharpZipBaseException("Compression Method unknown"); + } + + /* Maximum size of the backwards window in bits. + * We currently ignore this, but we could use it to make the + * inflater window more space efficient. On the other hand the + * full window (15 bits) is needed most times, anyway. + int max_wbits = ((header & 0x7000) >> 12) + 8; + */ + + if ((header & 0x0020) == 0) { // Dictionary flag? + mode = DECODE_BLOCKS; + } else { + mode = DECODE_DICT; + neededBits = 32; + } + return true; + } + + /// + /// Decodes the dictionary checksum after the deflate header. + /// + /// + /// False if more input is needed. + /// + private bool DecodeDict() + { + while (neededBits > 0) { + int dictByte = input.PeekBits(8); + if (dictByte < 0) { + return false; + } + input.DropBits(8); + readAdler = (readAdler << 8) | dictByte; + neededBits -= 8; + } + return false; + } + + /// + /// Decodes the huffman encoded symbols in the input stream. + /// + /// + /// false if more input is needed, true if output window is + /// full or the current block ends. + /// + /// + /// if deflated stream is invalid. 
+ /// + private bool DecodeHuffman() + { + int free = outputWindow.GetFreeSpace(); + while (free >= 258) + { + int symbol; + switch (mode) + { + case DECODE_HUFFMAN: + // This is the inner loop so it is optimized a bit + while (((symbol = litlenTree.GetSymbol(input)) & ~0xff) == 0) + { + outputWindow.Write(symbol); + if (--free < 258) + { + return true; + } + } + + if (symbol < 257) + { + if (symbol < 0) + { + return false; + } + else + { + // symbol == 256: end of block + distTree = null; + litlenTree = null; + mode = DECODE_BLOCKS; + return true; + } + } + + try + { + repLength = CPLENS[symbol - 257]; + neededBits = CPLEXT[symbol - 257]; + } + catch (Exception) + { + throw new SharpZipBaseException("Illegal rep length code"); + } + goto case DECODE_HUFFMAN_LENBITS; // fall through + + case DECODE_HUFFMAN_LENBITS: + if (neededBits > 0) + { + mode = DECODE_HUFFMAN_LENBITS; + int i = input.PeekBits(neededBits); + if (i < 0) + { + return false; + } + input.DropBits(neededBits); + repLength += i; + } + mode = DECODE_HUFFMAN_DIST; + goto case DECODE_HUFFMAN_DIST; // fall through + + case DECODE_HUFFMAN_DIST: + symbol = distTree.GetSymbol(input); + if (symbol < 0) + { + return false; + } + + try + { + repDist = CPDIST[symbol]; + neededBits = CPDEXT[symbol]; + } + catch (Exception) + { + throw new SharpZipBaseException("Illegal rep dist code"); + } + + goto case DECODE_HUFFMAN_DISTBITS; // fall through + + case DECODE_HUFFMAN_DISTBITS: + if (neededBits > 0) + { + mode = DECODE_HUFFMAN_DISTBITS; + int i = input.PeekBits(neededBits); + if (i < 0) + { + return false; + } + input.DropBits(neededBits); + repDist += i; + } + + outputWindow.Repeat(repLength, repDist); + free -= repLength; + mode = DECODE_HUFFMAN; + break; + + default: + throw new SharpZipBaseException("Inflater unknown mode"); + } + } + return true; + } + + /// + /// Decodes the adler checksum after the deflate stream. + /// + /// + /// false if more input is needed. + /// + /// + /// If checksum doesn't match. 
+ /// + private bool DecodeChksum() + { + while (neededBits > 0) { + int chkByte = input.PeekBits(8); + if (chkByte < 0) { + return false; + } + input.DropBits(8); + readAdler = (readAdler << 8) | chkByte; + neededBits -= 8; + } + + if ((int) adler.Value != readAdler) { + throw new SharpZipBaseException("Adler chksum doesn't match: " + (int)adler.Value + " vs. " + readAdler); + } + + mode = FINISHED; + return false; + } + + /// + /// Decodes the deflated stream. + /// + /// + /// false if more input is needed, or if finished. + /// + /// + /// if deflated stream is invalid. + /// + private bool Decode() + { + switch (mode) { + case DECODE_HEADER: + return DecodeHeader(); + + case DECODE_DICT: + return DecodeDict(); + + case DECODE_CHKSUM: + return DecodeChksum(); + + case DECODE_BLOCKS: + if (isLastBlock) { + if (noHeader) { + mode = FINISHED; + return false; + } else { + input.SkipToByteBoundary(); + neededBits = 32; + mode = DECODE_CHKSUM; + return true; + } + } + + int type = input.PeekBits(3); + if (type < 0) { + return false; + } + input.DropBits(3); + + if ((type & 1) != 0) { + isLastBlock = true; + } + switch (type >> 1){ + case DeflaterConstants.STORED_BLOCK: + input.SkipToByteBoundary(); + mode = DECODE_STORED_LEN1; + break; + case DeflaterConstants.STATIC_TREES: + litlenTree = InflaterHuffmanTree.defLitLenTree; + distTree = InflaterHuffmanTree.defDistTree; + mode = DECODE_HUFFMAN; + break; + case DeflaterConstants.DYN_TREES: + dynHeader = new InflaterDynHeader(); + mode = DECODE_DYN_HEADER; + break; + default: + throw new SharpZipBaseException("Unknown block type " + type); + } + return true; + + case DECODE_STORED_LEN1: + { + if ((uncomprLen = input.PeekBits(16)) < 0) { + return false; + } + input.DropBits(16); + mode = DECODE_STORED_LEN2; + } + goto case DECODE_STORED_LEN2; // fall through + + case DECODE_STORED_LEN2: + { + int nlen = input.PeekBits(16); + if (nlen < 0) { + return false; + } + input.DropBits(16); + if (nlen != (uncomprLen ^ 0xffff)) { + 
throw new SharpZipBaseException("broken uncompressed block"); + } + mode = DECODE_STORED; + } + goto case DECODE_STORED; // fall through + + case DECODE_STORED: + { + int more = outputWindow.CopyStored(input, uncomprLen); + uncomprLen -= more; + if (uncomprLen == 0) { + mode = DECODE_BLOCKS; + return true; + } + return !input.IsNeedingInput; + } + + case DECODE_DYN_HEADER: + if (!dynHeader.Decode(input)) { + return false; + } + + litlenTree = dynHeader.BuildLitLenTree(); + distTree = dynHeader.BuildDistTree(); + mode = DECODE_HUFFMAN; + goto case DECODE_HUFFMAN; // fall through + + case DECODE_HUFFMAN: + case DECODE_HUFFMAN_LENBITS: + case DECODE_HUFFMAN_DIST: + case DECODE_HUFFMAN_DISTBITS: + return DecodeHuffman(); + + case FINISHED: + return false; + + default: + throw new SharpZipBaseException("Inflater.Decode unknown mode"); + } + } + + /// + /// Sets the preset dictionary. This should only be called, if + /// needsDictionary() returns true and it should set the same + /// dictionary, that was used for deflating. The getAdler() + /// function returns the checksum of the dictionary needed. + /// + /// + /// The dictionary. + /// + public void SetDictionary(byte[] buffer) + { + SetDictionary(buffer, 0, buffer.Length); + } + + /// + /// Sets the preset dictionary. This should only be called, if + /// needsDictionary() returns true and it should set the same + /// dictionary, that was used for deflating. The getAdler() + /// function returns the checksum of the dictionary needed. + /// + /// + /// The dictionary. + /// + /// + /// The index into buffer where the dictionary starts. + /// + /// + /// The number of bytes in the dictionary. + /// + /// + /// No dictionary is needed. 
+ /// + /// + /// The adler checksum for the buffer is invalid + /// + public void SetDictionary(byte[] buffer, int index, int count) + { + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + if ( index < 0 ) { + throw new ArgumentOutOfRangeException("index"); + } + + if ( count < 0 ) { + throw new ArgumentOutOfRangeException("count"); + } + + if (!IsNeedingDictionary) { + throw new InvalidOperationException("Dictionary is not needed"); + } + + adler.Update(buffer, index, count); + + if ((int)adler.Value != readAdler) { + throw new SharpZipBaseException("Wrong adler checksum"); + } + adler.Reset(); + outputWindow.CopyDict(buffer, index, count); + mode = DECODE_BLOCKS; + } + + /// + /// Sets the input. This should only be called, if needsInput() + /// returns true. + /// + /// + /// the input. + /// + public void SetInput(byte[] buffer) + { + SetInput(buffer, 0, buffer.Length); + } + + /// + /// Sets the input. This should only be called, if needsInput() + /// returns true. + /// + /// + /// The source of input data + /// + /// + /// The index into buffer where the input starts. + /// + /// + /// The number of bytes of input to use. + /// + /// + /// No input is needed. + /// + /// + /// The index and/or count are wrong. + /// + public void SetInput(byte[] buffer, int index, int count) + { + input.SetInput(buffer, index, count); + totalIn += (long)count; + } + + /// + /// Inflates the compressed stream to the output buffer. If this + /// returns 0, you should check, whether IsNeedingDictionary(), + /// IsNeedingInput() or IsFinished() returns true, to determine why no + /// further output is produced. + /// + /// + /// the output buffer. + /// + /// + /// The number of bytes written to the buffer, 0 if no further + /// output can be produced. + /// + /// + /// if buffer has length 0. + /// + /// + /// if deflated stream is invalid. 
+ /// + public int Inflate(byte[] buffer) + { + if ( buffer == null ) + { + throw new ArgumentNullException("buffer"); + } + + return Inflate(buffer, 0, buffer.Length); + } + + /// + /// Inflates the compressed stream to the output buffer. If this + /// returns 0, you should check, whether needsDictionary(), + /// needsInput() or finished() returns true, to determine why no + /// further output is produced. + /// + /// + /// the output buffer. + /// + /// + /// the offset in buffer where storing starts. + /// + /// + /// the maximum number of bytes to output. + /// + /// + /// the number of bytes written to the buffer, 0 if no further output can be produced. + /// + /// + /// if count is less than 0. + /// + /// + /// if the index and / or count are wrong. + /// + /// + /// if deflated stream is invalid. + /// + public int Inflate(byte[] buffer, int offset, int count) + { + if ( buffer == null ) + { + throw new ArgumentNullException("buffer"); + } + + if ( count < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "count cannot be negative"); +#endif + } + + if ( offset < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "offset cannot be negative"); +#endif + } + + if ( offset + count > buffer.Length ) { + throw new ArgumentException("count exceeds buffer bounds"); + } + + // Special case: count may be zero + if (count == 0) + { + if (!IsFinished) { // -jr- 08-Nov-2003 INFLATE_BUG fix.. + Decode(); + } + return 0; + } + + int bytesCopied = 0; + + do { + if (mode != DECODE_CHKSUM) { + /* Don't give away any output, if we are waiting for the + * checksum in the input stream. + * + * With this trick we have always: + * IsNeedingInput() and not IsFinished() + * implies more output can be produced. 
+ */ + int more = outputWindow.CopyOutput(buffer, offset, count); + if ( more > 0 ) { + adler.Update(buffer, offset, more); + offset += more; + bytesCopied += more; + totalOut += (long)more; + count -= more; + if (count == 0) { + return bytesCopied; + } + } + } + } while (Decode() || ((outputWindow.GetAvailable() > 0) && (mode != DECODE_CHKSUM))); + return bytesCopied; + } + + /// + /// Returns true, if the input buffer is empty. + /// You should then call setInput(). + /// NOTE: This method also returns true when the stream is finished. + /// + public bool IsNeedingInput { + get { + return input.IsNeedingInput; + } + } + + /// + /// Returns true, if a preset dictionary is needed to inflate the input. + /// + public bool IsNeedingDictionary { + get { + return mode == DECODE_DICT && neededBits == 0; + } + } + + /// + /// Returns true, if the inflater has finished. This means, that no + /// input is needed and no output can be produced. + /// + public bool IsFinished { + get { + return mode == FINISHED && outputWindow.GetAvailable() == 0; + } + } + + /// + /// Gets the adler checksum. This is either the checksum of all + /// uncompressed bytes returned by inflate(), or if needsDictionary() + /// returns true (and thus no output was yet produced) this is the + /// adler checksum of the expected dictionary. + /// + /// + /// the adler checksum. + /// + public int Adler { + get { + return IsNeedingDictionary ? readAdler : (int) adler.Value; + } + } + + /// + /// Gets the total number of output bytes returned by Inflate(). + /// + /// + /// the total number of output bytes. + /// + public long TotalOut { + get { + return totalOut; + } + } + + /// + /// Gets the total number of processed compressed input bytes. + /// + /// + /// The total number of bytes of processed input bytes. + /// + public long TotalIn { + get { + return totalIn - (long)RemainingInput; + } + } + + /// + /// Gets the number of unprocessed input bytes. 
Useful, if the end of the + /// stream is reached and you want to further process the bytes after + /// the deflate stream. + /// + /// + /// The number of bytes of the input which have not been processed. + /// + public int RemainingInput { + // TODO: This should be a long? + get { + return input.AvailableBytes; + } + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterDynHeader.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterDynHeader.cs new file mode 100644 index 000000000..cb019b8b5 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterDynHeader.cs @@ -0,0 +1,218 @@ +// InflaterDynHeader.cs +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + class InflaterDynHeader + { + #region Constants + const int LNUM = 0; + const int DNUM = 1; + const int BLNUM = 2; + const int BLLENS = 3; + const int LENS = 4; + const int REPS = 5; + + static readonly int[] repMin = { 3, 3, 11 }; + static readonly int[] repBits = { 2, 3, 7 }; + + static readonly int[] BL_ORDER = + { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 }; + + #endregion + + #region Constructors + public InflaterDynHeader() + { + } + #endregion + + public bool Decode(StreamManipulator input) + { + decode_loop: + for (;;) { + switch (mode) { + case LNUM: + lnum = input.PeekBits(5); + if (lnum < 0) { + return false; + } + lnum += 257; + input.DropBits(5); + // System.err.println("LNUM: "+lnum); + mode = DNUM; + goto case DNUM; // fall through + case DNUM: + dnum = input.PeekBits(5); + if (dnum < 0) { + return false; + } + dnum++; + input.DropBits(5); + // System.err.println("DNUM: "+dnum); + num = lnum+dnum; + litdistLens = new byte[num]; + mode = BLNUM; + goto case BLNUM; // fall through + case BLNUM: + blnum = input.PeekBits(4); + if (blnum < 0) { 
+ return false; + } + blnum += 4; + input.DropBits(4); + blLens = new byte[19]; + ptr = 0; + // System.err.println("BLNUM: "+blnum); + mode = BLLENS; + goto case BLLENS; // fall through + case BLLENS: + while (ptr < blnum) { + int len = input.PeekBits(3); + if (len < 0) { + return false; + } + input.DropBits(3); + // System.err.println("blLens["+BL_ORDER[ptr]+"]: "+len); + blLens[BL_ORDER[ptr]] = (byte) len; + ptr++; + } + blTree = new InflaterHuffmanTree(blLens); + blLens = null; + ptr = 0; + mode = LENS; + goto case LENS; // fall through + case LENS: + { + int symbol; + while (((symbol = blTree.GetSymbol(input)) & ~15) == 0) { + /* Normal case: symbol in [0..15] */ + + // System.err.println("litdistLens["+ptr+"]: "+symbol); + litdistLens[ptr++] = lastLen = (byte)symbol; + + if (ptr == num) { + /* Finished */ + return true; + } + } + + /* need more input ? */ + if (symbol < 0) { + return false; + } + + /* otherwise repeat code */ + if (symbol >= 17) { + /* repeat zero */ + // System.err.println("repeating zero"); + lastLen = 0; + } else { + if (ptr == 0) { + throw new SharpZipBaseException(); + } + } + repSymbol = symbol-16; + } + mode = REPS; + goto case REPS; // fall through + case REPS: + { + int bits = repBits[repSymbol]; + int count = input.PeekBits(bits); + if (count < 0) { + return false; + } + input.DropBits(bits); + count += repMin[repSymbol]; + // System.err.println("litdistLens repeated: "+count); + + if (ptr + count > num) { + throw new SharpZipBaseException(); + } + while (count-- > 0) { + litdistLens[ptr++] = lastLen; + } + + if (ptr == num) { + /* Finished */ + return true; + } + } + mode = LENS; + goto decode_loop; + } + } + } + + public InflaterHuffmanTree BuildLitLenTree() + { + byte[] litlenLens = new byte[lnum]; + Array.Copy(litdistLens, 0, litlenLens, 0, lnum); + return new InflaterHuffmanTree(litlenLens); + } + + public InflaterHuffmanTree BuildDistTree() + { + byte[] distLens = new byte[dnum]; + Array.Copy(litdistLens, lnum, distLens, 0, 
dnum); + return new InflaterHuffmanTree(distLens); + } + + #region Instance Fields + byte[] blLens; + byte[] litdistLens; + + InflaterHuffmanTree blTree; + + /// + /// The current decode mode + /// + int mode; + int lnum, dnum, blnum, num; + int repSymbol; + byte lastLen; + int ptr; + #endregion + + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterHuffmanTree.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterHuffmanTree.cs new file mode 100644 index 000000000..e1467fa82 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/InflaterHuffmanTree.cs @@ -0,0 +1,232 @@ +// InflaterHuffmanTree.cs +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// Huffman tree used for inflation + /// + public class InflaterHuffmanTree + { + #region Constants + const int MAX_BITLEN = 15; + #endregion + + #region Instance Fields + short[] tree; + #endregion + + /// + /// Literal length tree + /// + public static InflaterHuffmanTree defLitLenTree; + + /// + /// Distance tree + /// + public static InflaterHuffmanTree defDistTree; + + static InflaterHuffmanTree() + { + try { + byte[] codeLengths = new byte[288]; + int i = 0; + while (i < 144) { + codeLengths[i++] = 8; + } + while (i < 256) { + codeLengths[i++] = 9; + } + while (i < 280) { + codeLengths[i++] = 7; + } + while (i < 288) { + codeLengths[i++] = 8; + } + defLitLenTree = new InflaterHuffmanTree(codeLengths); + + codeLengths = new byte[32]; + i = 0; + while (i < 32) { + codeLengths[i++] = 5; + } + defDistTree = new InflaterHuffmanTree(codeLengths); + } catch (Exception) { + throw new SharpZipBaseException("InflaterHuffmanTree: static tree length illegal"); + } + } + + #region Constructors + /// + /// Constructs a Huffman tree from the array of code lengths. 
+ /// + /// + /// the array of code lengths + /// + public InflaterHuffmanTree(byte[] codeLengths) + { + BuildTree(codeLengths); + } + #endregion + + void BuildTree(byte[] codeLengths) + { + int[] blCount = new int[MAX_BITLEN + 1]; + int[] nextCode = new int[MAX_BITLEN + 1]; + + for (int i = 0; i < codeLengths.Length; i++) { + int bits = codeLengths[i]; + if (bits > 0) { + blCount[bits]++; + } + } + + int code = 0; + int treeSize = 512; + for (int bits = 1; bits <= MAX_BITLEN; bits++) { + nextCode[bits] = code; + code += blCount[bits] << (16 - bits); + if (bits >= 10) { + /* We need an extra table for bit lengths >= 10. */ + int start = nextCode[bits] & 0x1ff80; + int end = code & 0x1ff80; + treeSize += (end - start) >> (16 - bits); + } + } + +/* -jr comment this out! doesnt work for dynamic trees and pkzip 2.04g + if (code != 65536) + { + throw new SharpZipBaseException("Code lengths don't add up properly."); + } +*/ + /* Now create and fill the extra tables from longest to shortest + * bit len. This way the sub trees will be aligned. 
+ */ + tree = new short[treeSize]; + int treePtr = 512; + for (int bits = MAX_BITLEN; bits >= 10; bits--) { + int end = code & 0x1ff80; + code -= blCount[bits] << (16 - bits); + int start = code & 0x1ff80; + for (int i = start; i < end; i += 1 << 7) { + tree[DeflaterHuffman.BitReverse(i)] = (short) ((-treePtr << 4) | bits); + treePtr += 1 << (bits-9); + } + } + + for (int i = 0; i < codeLengths.Length; i++) { + int bits = codeLengths[i]; + if (bits == 0) { + continue; + } + code = nextCode[bits]; + int revcode = DeflaterHuffman.BitReverse(code); + if (bits <= 9) { + do { + tree[revcode] = (short) ((i << 4) | bits); + revcode += 1 << bits; + } while (revcode < 512); + } else { + int subTree = tree[revcode & 511]; + int treeLen = 1 << (subTree & 15); + subTree = -(subTree >> 4); + do { + tree[subTree | (revcode >> 9)] = (short) ((i << 4) | bits); + revcode += 1 << bits; + } while (revcode < treeLen); + } + nextCode[bits] = code + (1 << (16 - bits)); + } + + } + + /// + /// Reads the next symbol from input. The symbol is encoded using the + /// huffman tree. + /// + /// + /// input the input source. + /// + /// + /// the next symbol, or -1 if not enough input is available. 
+ /// + public int GetSymbol(StreamManipulator input) + { + int lookahead, symbol; + if ((lookahead = input.PeekBits(9)) >= 0) { + if ((symbol = tree[lookahead]) >= 0) { + input.DropBits(symbol & 15); + return symbol >> 4; + } + int subtree = -(symbol >> 4); + int bitlen = symbol & 15; + if ((lookahead = input.PeekBits(bitlen)) >= 0) { + symbol = tree[subtree | (lookahead >> 9)]; + input.DropBits(symbol & 15); + return symbol >> 4; + } else { + int bits = input.AvailableBits; + lookahead = input.PeekBits(bits); + symbol = tree[subtree | (lookahead >> 9)]; + if ((symbol & 15) <= bits) { + input.DropBits(symbol & 15); + return symbol >> 4; + } else { + return -1; + } + } + } else { + int bits = input.AvailableBits; + lookahead = input.PeekBits(bits); + symbol = tree[lookahead]; + if (symbol >= 0 && (symbol & 15) <= bits) { + input.DropBits(symbol & 15); + return symbol >> 4; + } else { + return -1; + } + } + } + } +} + diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/PendingBuffer.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/PendingBuffer.cs new file mode 100644 index 000000000..1ea1fb4b2 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/PendingBuffer.cs @@ -0,0 +1,295 @@ +// PendingBuffer.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression +{ + + /// + /// This class is general purpose class for writing data to a buffer. 
+ /// + /// It allows you to write bits as well as bytes + /// Based on DeflaterPending.java + /// + /// author of the original java version : Jochen Hoenicke + /// + public class PendingBuffer + { + #region Instance Fields + /// + /// Internal work buffer + /// + byte[] buffer_; + + int start; + int end; + + uint bits; + int bitCount; + #endregion + + #region Constructors + /// + /// construct instance using default buffer size of 4096 + /// + public PendingBuffer() : this( 4096 ) + { + } + + /// + /// construct instance using specified buffer size + /// + /// + /// size to use for internal buffer + /// + public PendingBuffer(int bufferSize) + { + buffer_ = new byte[bufferSize]; + } + + #endregion + + /// + /// Clear internal state/buffers + /// + public void Reset() + { + start = end = bitCount = 0; + } + + /// + /// Write a byte to buffer + /// + /// + /// The value to write + /// + public void WriteByte(int value) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + buffer_[end++] = unchecked((byte) value); + } + + /// + /// Write a short value to buffer LSB first + /// + /// + /// The value to write. + /// + public void WriteShort(int value) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + buffer_[end++] = unchecked((byte) value); + buffer_[end++] = unchecked((byte) (value >> 8)); + } + + /// + /// write an integer LSB first + /// + /// The value to write. 
+ public void WriteInt(int value) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + buffer_[end++] = unchecked((byte) value); + buffer_[end++] = unchecked((byte) (value >> 8)); + buffer_[end++] = unchecked((byte) (value >> 16)); + buffer_[end++] = unchecked((byte) (value >> 24)); + } + + /// + /// Write a block of data to buffer + /// + /// data to write + /// offset of first byte to write + /// number of bytes to write + public void WriteBlock(byte[] block, int offset, int length) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + System.Array.Copy(block, offset, buffer_, end, length); + end += length; + } + + /// + /// The number of bits written to the buffer + /// + public int BitCount { + get { + return bitCount; + } + } + + /// + /// Align internal buffer on a byte boundary + /// + public void AlignToByte() + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + if (bitCount > 0) + { + buffer_[end++] = unchecked((byte) bits); + if (bitCount > 8) { + buffer_[end++] = unchecked((byte) (bits >> 8)); + } + } + bits = 0; + bitCount = 0; + } + + /// + /// Write bits to internal buffer + /// + /// source of bits + /// number of bits to write + public void WriteBits(int b, int count) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } + + // if (DeflaterConstants.DEBUGGING) { + // //Console.WriteLine("writeBits("+b+","+count+")"); + // } +#endif + bits |= (uint)(b << bitCount); + bitCount += count; + if (bitCount >= 16) { + buffer_[end++] = unchecked((byte) bits); + buffer_[end++] = unchecked((byte) (bits >> 8)); + bits >>= 16; + bitCount -= 16; + } + } + + /// + /// 
Write a short value to internal buffer most significant byte first + /// + /// value to write + public void WriteShortMSB(int s) + { +#if DebugDeflation + if (DeflaterConstants.DEBUGGING && (start != 0) ) + { + throw new SharpZipBaseException("Debug check: start != 0"); + } +#endif + buffer_[end++] = unchecked((byte) (s >> 8)); + buffer_[end++] = unchecked((byte) s); + } + + /// + /// Indicates if buffer has been flushed + /// + public bool IsFlushed { + get { + return end == 0; + } + } + + /// + /// Flushes the pending buffer into the given output array. If the + /// output array is to small, only a partial flush is done. + /// + /// The output array. + /// The offset into output array. + /// The maximum number of bytes to store. + /// The number of bytes flushed. + public int Flush(byte[] output, int offset, int length) + { + if (bitCount >= 8) { + buffer_[end++] = unchecked((byte) bits); + bits >>= 8; + bitCount -= 8; + } + + if (length > end - start) { + length = end - start; + System.Array.Copy(buffer_, start, output, offset, length); + start = 0; + end = 0; + } else { + System.Array.Copy(buffer_, start, output, offset, length); + start += length; + } + return length; + } + + /// + /// Convert internal buffer to byte array. + /// Buffer is empty on completion + /// + /// + /// The internal buffer contents converted to a byte array. 
+ /// + public byte[] ToByteArray() + { + byte[] result = new byte[end - start]; + System.Array.Copy(buffer_, start, result, 0, result.Length); + start = 0; + end = 0; + return result; + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs new file mode 100644 index 000000000..9adb557c0 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs @@ -0,0 +1,602 @@ +// DeflaterOutputStream.cs +// +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 22-12-2009 DavidPierson Added AES support + +using System; +using System.IO; + +#if !NETCF_1_0 +using System.Security.Cryptography; +using GitHub.ICSharpCode.SharpZipLib.Encryption; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams +{ + /// + /// A special stream deflating or compressing the bytes that are + /// written to it. It uses a Deflater to perform actual deflating.
+ /// Authors of the original java version : Tom Tromey, Jochen Hoenicke + ///
+ public class DeflaterOutputStream : Stream + { + #region Constructors + /// + /// Creates a new DeflaterOutputStream with a default Deflater and default buffer size. + /// + /// + /// the output stream where deflated output should be written. + /// + public DeflaterOutputStream(Stream baseOutputStream) + : this(baseOutputStream, new Deflater(), 512) + { + } + + /// + /// Creates a new DeflaterOutputStream with the given Deflater and + /// default buffer size. + /// + /// + /// the output stream where deflated output should be written. + /// + /// + /// the underlying deflater. + /// + public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater) + : this(baseOutputStream, deflater, 512) + { + } + + /// + /// Creates a new DeflaterOutputStream with the given Deflater and + /// buffer size. + /// + /// + /// The output stream where deflated output is written. + /// + /// + /// The underlying deflater to use + /// + /// + /// The buffer size in bytes to use when deflating (minimum value 512) + /// + /// + /// bufsize is less than or equal to zero. + /// + /// + /// baseOutputStream does not support writing + /// + /// + /// deflater instance is null + /// + public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater, int bufferSize) + { + if ( baseOutputStream == null ) { + throw new ArgumentNullException("baseOutputStream"); + } + + if (baseOutputStream.CanWrite == false) { + throw new ArgumentException("Must support writing", "baseOutputStream"); + } + + if (deflater == null) { + throw new ArgumentNullException("deflater"); + } + + if (bufferSize < 512) { + throw new ArgumentOutOfRangeException("bufferSize"); + } + + baseOutputStream_ = baseOutputStream; + buffer_ = new byte[bufferSize]; + deflater_ = deflater; + } + #endregion + + #region Public API + /// + /// Finishes the stream by calling finish() on the deflater. 
+ /// + /// + /// Not all input is deflated + /// + public virtual void Finish() + { + deflater_.Finish(); + while (!deflater_.IsFinished) { + int len = deflater_.Deflate(buffer_, 0, buffer_.Length); + if (len <= 0) { + break; + } + +#if NETCF_1_0 + if ( keys != null ) { +#else + if (cryptoTransform_ != null) { +#endif + EncryptBlock(buffer_, 0, len); + } + + baseOutputStream_.Write(buffer_, 0, len); + } + + if (!deflater_.IsFinished) { + throw new SharpZipBaseException("Can't deflate all input?"); + } + + baseOutputStream_.Flush(); + +#if NETCF_1_0 + if ( keys != null ) { + keys = null; + } +#else + if (cryptoTransform_ != null) { +#if !NET_1_1 && !NETCF_2_0 + if (cryptoTransform_ is ZipAESTransform) { + AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode(); + } +#endif + cryptoTransform_.Dispose(); + cryptoTransform_ = null; + } +#endif + } + + /// + /// Get/set flag indicating ownership of the underlying stream. + /// When the flag is true will close the underlying stream also. + /// + public bool IsStreamOwner + { + get { return isStreamOwner_; } + set { isStreamOwner_ = value; } + } + + /// + /// Allows client to determine if an entry can be patched after its added + /// + public bool CanPatchEntries { + get { + return baseOutputStream_.CanSeek; + } + } + + #endregion + + #region Encryption + + string password; + +#if NETCF_1_0 + uint[] keys; +#else + ICryptoTransform cryptoTransform_; + + /// + /// Returns the 10 byte AUTH CODE to be appended immediately following the AES data stream. + /// + protected byte[] AESAuthCode; +#endif + + /// + /// Get/set the password used for encryption. + /// + /// When set to null or if the password is empty no encryption is performed + public string Password { + get { + return password; + } + set { + if ( (value != null) && (value.Length == 0) ) { + password = null; + } else { + password = value; + } + } + } + + /// + /// Encrypt a block of data + /// + /// + /// Data to encrypt. 
NOTE the original contents of the buffer are lost + /// + /// + /// Offset of first byte in buffer to encrypt + /// + /// + /// Number of bytes in buffer to encrypt + /// + protected void EncryptBlock(byte[] buffer, int offset, int length) + { +#if NETCF_1_0 + for (int i = offset; i < offset + length; ++i) { + byte oldbyte = buffer[i]; + buffer[i] ^= EncryptByte(); + UpdateKeys(oldbyte); + } +#else + cryptoTransform_.TransformBlock(buffer, 0, length, buffer, 0); +#endif + } + + /// + /// Initializes encryption keys based on given . + /// + /// The password. + protected void InitializePassword(string password) + { +#if NETCF_1_0 + keys = new uint[] { + 0x12345678, + 0x23456789, + 0x34567890 + }; + + byte[] rawPassword = ZipConstants.ConvertToArray(password); + + for (int i = 0; i < rawPassword.Length; ++i) { + UpdateKeys((byte)rawPassword[i]); + } + +#else + PkzipClassicManaged pkManaged = new PkzipClassicManaged(); + byte[] key = PkzipClassic.GenerateKeys(ZipConstants.ConvertToArray(password)); + cryptoTransform_ = pkManaged.CreateEncryptor(key, null); +#endif + } + +#if !NET_1_1 && !NETCF_2_0 + /// + /// Initializes encryption keys based on given password. 
+ /// + protected void InitializeAESPassword(ZipEntry entry, string rawPassword, + out byte[] salt, out byte[] pwdVerifier) { + salt = new byte[entry.AESSaltLen]; + // Salt needs to be cryptographically random, and unique per file + if (_aesRnd == null) + _aesRnd = new RNGCryptoServiceProvider(); + _aesRnd.GetBytes(salt); + int blockSize = entry.AESKeySize / 8; // bits to bytes + + cryptoTransform_ = new ZipAESTransform(rawPassword, salt, blockSize, true); + pwdVerifier = ((ZipAESTransform)cryptoTransform_).PwdVerifier; + } +#endif + +#if NETCF_1_0 + + /// + /// Encrypt a single byte + /// + /// + /// The encrypted value + /// + protected byte EncryptByte() + { + uint temp = ((keys[2] & 0xFFFF) | 2); + return (byte)((temp * (temp ^ 1)) >> 8); + } + + /// + /// Update encryption keys + /// + protected void UpdateKeys(byte ch) + { + keys[0] = Crc32.ComputeCrc32(keys[0], ch); + keys[1] = keys[1] + (byte)keys[0]; + keys[1] = keys[1] * 134775813 + 1; + keys[2] = Crc32.ComputeCrc32(keys[2], (byte)(keys[1] >> 24)); + } +#endif + + #endregion + + #region Deflation Support + /// + /// Deflates everything in the input buffers. This will call + /// def.deflate() until all bytes from the input buffers + /// are processed. 
+ /// + protected void Deflate() + { + while (!deflater_.IsNeedingInput) + { + int deflateCount = deflater_.Deflate(buffer_, 0, buffer_.Length); + + if (deflateCount <= 0) { + break; + } +#if NETCF_1_0 + if (keys != null) +#else + if (cryptoTransform_ != null) +#endif + { + EncryptBlock(buffer_, 0, deflateCount); + } + + baseOutputStream_.Write(buffer_, 0, deflateCount); + } + + if (!deflater_.IsNeedingInput) { + throw new SharpZipBaseException("DeflaterOutputStream can't deflate all input?"); + } + } + #endregion + + #region Stream Overrides + /// + /// Gets value indicating stream can be read from + /// + public override bool CanRead + { + get { + return false; + } + } + + /// + /// Gets a value indicating if seeking is supported for this stream + /// This property always returns false + /// + public override bool CanSeek { + get { + return false; + } + } + + /// + /// Get value indicating if this stream supports writing + /// + public override bool CanWrite { + get { + return baseOutputStream_.CanWrite; + } + } + + /// + /// Get current length of stream + /// + public override long Length { + get { + return baseOutputStream_.Length; + } + } + + /// + /// Gets the current position within the stream. + /// + /// Any attempt to set position + public override long Position { + get { + return baseOutputStream_.Position; + } + set { + throw new NotSupportedException("Position property not supported"); + } + } + + /// + /// Sets the current position of this stream to the given value. Not supported by this class! + /// + /// The offset relative to the to seek. + /// The to seek from. + /// The new position in the stream. + /// Any access + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException("DeflaterOutputStream Seek not supported"); + } + + /// + /// Sets the length of this stream to the given value. Not supported by this class! + /// + /// The new stream length. 
+ /// Any access + public override void SetLength(long value) + { + throw new NotSupportedException("DeflaterOutputStream SetLength not supported"); + } + + /// + /// Read a byte from stream advancing position by one + /// + /// The byte read cast to an int. THe value is -1 if at the end of the stream. + /// Any access + public override int ReadByte() + { + throw new NotSupportedException("DeflaterOutputStream ReadByte not supported"); + } + + /// + /// Read a block of bytes from stream + /// + /// The buffer to store read data in. + /// The offset to start storing at. + /// The maximum number of bytes to read. + /// The actual number of bytes read. Zero if end of stream is detected. + /// Any access + public override int Read(byte[] buffer, int offset, int count) + { + throw new NotSupportedException("DeflaterOutputStream Read not supported"); + } + + /// + /// Asynchronous reads are not supported a NotSupportedException is always thrown + /// + /// The buffer to read into. + /// The offset to start storing data at. + /// The number of bytes to read + /// The async callback to use. + /// The state to use. + /// Returns an + /// Any access + public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + throw new NotSupportedException("DeflaterOutputStream BeginRead not currently supported"); + } + + /// + /// Asynchronous writes arent supported, a NotSupportedException is always thrown + /// + /// The buffer to write. + /// The offset to begin writing at. + /// The number of bytes to write. + /// The to use. + /// The state object. + /// Returns an IAsyncResult. + /// Any access + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + throw new NotSupportedException("BeginWrite is not supported"); + } + + /// + /// Flushes the stream by calling Flush on the deflater and then + /// on the underlying stream. 
This ensures that all bytes are flushed. + /// + public override void Flush() + { + deflater_.Flush(); + Deflate(); + baseOutputStream_.Flush(); + } + + /// + /// Calls and closes the underlying + /// stream when is true. + /// + public override void Close() + { + if ( !isClosed_ ) { + isClosed_ = true; + + try { + Finish(); +#if NETCF_1_0 + keys=null; +#else + if ( cryptoTransform_ != null ) { + GetAuthCodeIfAES(); + cryptoTransform_.Dispose(); + cryptoTransform_ = null; + } +#endif + } + finally { + if( isStreamOwner_ ) { + baseOutputStream_.Close(); + } + } + } + } + + private void GetAuthCodeIfAES() { +#if !NET_1_1 && !NETCF_2_0 + if (cryptoTransform_ is ZipAESTransform) { + AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode(); + } +#endif + } + + /// + /// Writes a single byte to the compressed output stream. + /// + /// + /// The byte value. + /// + public override void WriteByte(byte value) + { + byte[] b = new byte[1]; + b[0] = value; + Write(b, 0, 1); + } + + /// + /// Writes bytes from an array to the compressed stream. + /// + /// + /// The byte array + /// + /// + /// The offset into the byte array where to start. + /// + /// + /// The number of bytes to write. + /// + public override void Write(byte[] buffer, int offset, int count) + { + deflater_.SetInput(buffer, offset, count); + Deflate(); + } + #endregion + + #region Instance Fields + /// + /// This buffer is used temporarily to retrieve the bytes from the + /// deflater and write them to the underlying output stream. + /// + byte[] buffer_; + + /// + /// The deflater which is used to deflate the stream. + /// + protected Deflater deflater_; + + /// + /// Base stream the deflater depends on. 
+ /// + protected Stream baseOutputStream_; + + bool isClosed_; + + bool isStreamOwner_ = true; + #endregion + + #region Static Fields + +#if !NET_1_1 && !NETCF_2_0 + // Static to help ensure that multiple files within a zip will get different random salt + private static RNGCryptoServiceProvider _aesRnd; +#endif + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs new file mode 100644 index 000000000..f1599041b --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs @@ -0,0 +1,732 @@ +// InflaterInputStream.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 11-08-2009 GeoffHart T9121 Added Multi-member gzip support + +using System; +using System.IO; + +#if !NETCF_1_0 +using System.Security.Cryptography; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams +{ + + /// + /// An input buffer customised for use by + /// + /// + /// The buffer supports decryption of incoming data. + /// + public class InflaterInputBuffer + { + #region Constructors + /// + /// Initialise a new instance of with a default buffer size + /// + /// The stream to buffer. + public InflaterInputBuffer(Stream stream) : this(stream , 4096) + { + } + + /// + /// Initialise a new instance of + /// + /// The stream to buffer. + /// The size to use for the buffer + /// A minimum buffer size of 1KB is permitted. Lower sizes are treated as 1KB. + public InflaterInputBuffer(Stream stream, int bufferSize) + { + inputStream = stream; + if ( bufferSize < 1024 ) { + bufferSize = 1024; + } + rawData = new byte[bufferSize]; + clearText = rawData; + } + #endregion + + /// + /// Get the length of bytes bytes in the + /// + public int RawLength + { + get { + return rawLength; + } + } + + /// + /// Get the contents of the raw data buffer. + /// + /// This may contain encrypted data. 
+ public byte[] RawData + { + get { + return rawData; + } + } + + /// + /// Get the number of useable bytes in + /// + public int ClearTextLength + { + get { + return clearTextLength; + } + } + + /// + /// Get the contents of the clear text buffer. + /// + public byte[] ClearText + { + get { + return clearText; + } + } + + /// + /// Get/set the number of bytes available + /// + public int Available + { + get { return available; } + set { available = value; } + } + + /// + /// Call passing the current clear text buffer contents. + /// + /// The inflater to set input for. + public void SetInflaterInput(Inflater inflater) + { + if ( available > 0 ) { + inflater.SetInput(clearText, clearTextLength - available, available); + available = 0; + } + } + + /// + /// Fill the buffer from the underlying input stream. + /// + public void Fill() + { + rawLength = 0; + int toRead = rawData.Length; + + while (toRead > 0) { + int count = inputStream.Read(rawData, rawLength, toRead); + if ( count <= 0 ) { + break; + } + rawLength += count; + toRead -= count; + } + +#if !NETCF_1_0 + if ( cryptoTransform != null ) { + clearTextLength = cryptoTransform.TransformBlock(rawData, 0, rawLength, clearText, 0); + } + else +#endif + { + clearTextLength = rawLength; + } + + available = clearTextLength; + } + + /// + /// Read a buffer directly from the input stream + /// + /// The buffer to fill + /// Returns the number of bytes read. + public int ReadRawBuffer(byte[] buffer) + { + return ReadRawBuffer(buffer, 0, buffer.Length); + } + + /// + /// Read a buffer directly from the input stream + /// + /// The buffer to read into + /// The offset to start reading data into. + /// The number of bytes to read. + /// Returns the number of bytes read. 
+ public int ReadRawBuffer(byte[] outBuffer, int offset, int length) + { + if ( length < 0 ) { + throw new ArgumentOutOfRangeException("length"); + } + + int currentOffset = offset; + int currentLength = length; + + while ( currentLength > 0 ) { + if ( available <= 0 ) { + Fill(); + if (available <= 0) { + return 0; + } + } + int toCopy = Math.Min(currentLength, available); + System.Array.Copy(rawData, rawLength - (int)available, outBuffer, currentOffset, toCopy); + currentOffset += toCopy; + currentLength -= toCopy; + available -= toCopy; + } + return length; + } + + /// + /// Read clear text data from the input stream. + /// + /// The buffer to add data to. + /// The offset to start adding data at. + /// The number of bytes to read. + /// Returns the number of bytes actually read. + public int ReadClearTextBuffer(byte[] outBuffer, int offset, int length) + { + if ( length < 0 ) { + throw new ArgumentOutOfRangeException("length"); + } + + int currentOffset = offset; + int currentLength = length; + + while ( currentLength > 0 ) { + if ( available <= 0 ) { + Fill(); + if (available <= 0) { + return 0; + } + } + + int toCopy = Math.Min(currentLength, available); + Array.Copy(clearText, clearTextLength - (int)available, outBuffer, currentOffset, toCopy); + currentOffset += toCopy; + currentLength -= toCopy; + available -= toCopy; + } + return length; + } + + /// + /// Read a from the input stream. + /// + /// Returns the byte read. + public int ReadLeByte() + { + if (available <= 0) { + Fill(); + if (available <= 0) { + throw new ZipException("EOF in header"); + } + } + byte result = rawData[rawLength - available]; + available -= 1; + return result; + } + + /// + /// Read an in little endian byte order. + /// + /// The short value read case to an int. + public int ReadLeShort() + { + return ReadLeByte() | (ReadLeByte() << 8); + } + + /// + /// Read an in little endian byte order. + /// + /// The int value read. 
+ public int ReadLeInt() + { + return ReadLeShort() | (ReadLeShort() << 16); + } + + /// + /// Read a in little endian byte order. + /// + /// The long value read. + public long ReadLeLong() + { + return (uint)ReadLeInt() | ((long)ReadLeInt() << 32); + } + +#if !NETCF_1_0 + /// + /// Get/set the to apply to any data. + /// + /// Set this value to null to have no transform applied. + public ICryptoTransform CryptoTransform + { + set { + cryptoTransform = value; + if ( cryptoTransform != null ) { + if ( rawData == clearText ) { + if ( internalClearText == null ) { + internalClearText = new byte[rawData.Length]; + } + clearText = internalClearText; + } + clearTextLength = rawLength; + if ( available > 0 ) { + cryptoTransform.TransformBlock(rawData, rawLength - available, available, clearText, rawLength - available); + } + } else { + clearText = rawData; + clearTextLength = rawLength; + } + } + } +#endif + + #region Instance Fields + int rawLength; + byte[] rawData; + + int clearTextLength; + byte[] clearText; +#if !NETCF_1_0 + byte[] internalClearText; +#endif + + int available; + +#if !NETCF_1_0 + ICryptoTransform cryptoTransform; +#endif + Stream inputStream; + #endregion + } + + /// + /// This filter stream is used to decompress data compressed using the "deflate" + /// format. The "deflate" format is described in RFC 1951. + /// + /// This stream may form the basis for other decompression filters, such + /// as the GZipInputStream. + /// + /// Author of the original java version : John Leuner. + /// + public class InflaterInputStream : Stream + { + #region Constructors + /// + /// Create an InflaterInputStream with the default decompressor + /// and a default buffer size of 4KB. + /// + /// + /// The InputStream to read bytes from + /// + public InflaterInputStream(Stream baseInputStream) + : this(baseInputStream, new Inflater(), 4096) + { + } + + /// + /// Create an InflaterInputStream with the specified decompressor + /// and a default buffer size of 4KB. 
+ /// + /// + /// The source of input data + /// + /// + /// The decompressor used to decompress data read from baseInputStream + /// + public InflaterInputStream(Stream baseInputStream, Inflater inf) + : this(baseInputStream, inf, 4096) + { + } + + /// + /// Create an InflaterInputStream with the specified decompressor + /// and the specified buffer size. + /// + /// + /// The InputStream to read bytes from + /// + /// + /// The decompressor to use + /// + /// + /// Size of the buffer to use + /// + public InflaterInputStream(Stream baseInputStream, Inflater inflater, int bufferSize) + { + if (baseInputStream == null) { + throw new ArgumentNullException("baseInputStream"); + } + + if (inflater == null) { + throw new ArgumentNullException("inflater"); + } + + if (bufferSize <= 0) { + throw new ArgumentOutOfRangeException("bufferSize"); + } + + this.baseInputStream = baseInputStream; + this.inf = inflater; + + inputBuffer = new InflaterInputBuffer(baseInputStream, bufferSize); + } + + #endregion + + /// + /// Get/set flag indicating ownership of underlying stream. + /// When the flag is true will close the underlying stream also. + /// + /// + /// The default value is true. + /// + public bool IsStreamOwner + { + get { return isStreamOwner; } + set { isStreamOwner = value; } + } + + /// + /// Skip specified number of bytes of uncompressed data + /// + /// + /// Number of bytes to skip + /// + /// + /// The number of bytes skipped, zero if the end of + /// stream has been reached + /// + /// + /// The number of bytes to skip is less than or equal to zero. + /// + public long Skip(long count) + { + if (count <= 0) { + throw new ArgumentOutOfRangeException("count"); + } + + // v0.80 Skip by seeking if underlying stream supports it... 
+ if (baseInputStream.CanSeek) { + baseInputStream.Seek(count, SeekOrigin.Current); + return count; + } + else { + int length = 2048; + if (count < length) { + length = (int) count; + } + + byte[] tmp = new byte[length]; + int readCount = 1; + long toSkip = count; + + while ((toSkip > 0) && (readCount > 0) ) { + if (toSkip < length) { + length = (int)toSkip; + } + + readCount = baseInputStream.Read(tmp, 0, length); + toSkip -= readCount; + } + + return count - toSkip; + } + } + + /// + /// Clear any cryptographic state. + /// + protected void StopDecrypting() + { +#if !NETCF_1_0 + inputBuffer.CryptoTransform = null; +#endif + } + + /// + /// Returns 0 once the end of the stream (EOF) has been reached. + /// Otherwise returns 1. + /// + public virtual int Available + { + get { + return inf.IsFinished ? 0 : 1; + } + } + + /// + /// Fills the buffer with more data to decompress. + /// + /// + /// Stream ends early + /// + protected void Fill() + { + // Protect against redundant calls + if (inputBuffer.Available <= 0) { + inputBuffer.Fill(); + if (inputBuffer.Available <= 0) { + throw new SharpZipBaseException("Unexpected EOF"); + } + } + inputBuffer.SetInflaterInput(inf); + } + + #region Stream Overrides + /// + /// Gets a value indicating whether the current stream supports reading + /// + public override bool CanRead + { + get { + return baseInputStream.CanRead; + } + } + + /// + /// Gets a value of false indicating seeking is not supported for this stream. + /// + public override bool CanSeek { + get { + return false; + } + } + + /// + /// Gets a value of false indicating that this stream is not writeable. + /// + public override bool CanWrite { + get { + return false; + } + } + + /// + /// A value representing the length of the stream in bytes. + /// + public override long Length { + get { + return inputBuffer.RawLength; + } + } + + /// + /// The current position within the stream. 
+ /// Throws a NotSupportedException when attempting to set the position + /// + /// Attempting to set the position + public override long Position { + get { + return baseInputStream.Position; + } + set { + throw new NotSupportedException("InflaterInputStream Position not supported"); + } + } + + /// + /// Flushes the baseInputStream + /// + public override void Flush() + { + baseInputStream.Flush(); + } + + /// + /// Sets the position within the current stream + /// Always throws a NotSupportedException + /// + /// The relative offset to seek to. + /// The defining where to seek from. + /// The new position in the stream. + /// Any access + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException("Seek not supported"); + } + + /// + /// Set the length of the current stream + /// Always throws a NotSupportedException + /// + /// The new length value for the stream. + /// Any access + public override void SetLength(long value) + { + throw new NotSupportedException("InflaterInputStream SetLength not supported"); + } + + /// + /// Writes a sequence of bytes to stream and advances the current position + /// This method always throws a NotSupportedException + /// + /// Thew buffer containing data to write. + /// The offset of the first byte to write. + /// The number of bytes to write. + /// Any access + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException("InflaterInputStream Write not supported"); + } + + /// + /// Writes one byte to the current stream and advances the current position + /// Always throws a NotSupportedException + /// + /// The byte to write. + /// Any access + public override void WriteByte(byte value) + { + throw new NotSupportedException("InflaterInputStream WriteByte not supported"); + } + + /// + /// Entry point to begin an asynchronous write. Always throws a NotSupportedException. 
+ /// + /// The buffer to write data from + /// Offset of first byte to write + /// The maximum number of bytes to write + /// The method to be called when the asynchronous write operation is completed + /// A user-provided object that distinguishes this particular asynchronous write request from other requests + /// An IAsyncResult that references the asynchronous write + /// Any access + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + throw new NotSupportedException("InflaterInputStream BeginWrite not supported"); + } + + /// + /// Closes the input stream. When + /// is true the underlying stream is also closed. + /// + public override void Close() + { + if ( !isClosed ) { + isClosed = true; + if ( isStreamOwner ) { + baseInputStream.Close(); + } + } + } + + /// + /// Reads decompressed data into the provided buffer byte array + /// + /// + /// The array to read and decompress data into + /// + /// + /// The offset indicating where the data should be placed + /// + /// + /// The number of bytes to decompress + /// + /// The number of bytes read. Zero signals the end of stream + /// + /// Inflater needs a dictionary + /// + public override int Read(byte[] buffer, int offset, int count) + { + if (inf.IsNeedingDictionary) + { + throw new SharpZipBaseException("Need a dictionary"); + } + + int remainingBytes = count; + while (true) { + int bytesRead = inf.Inflate(buffer, offset, remainingBytes); + offset += bytesRead; + remainingBytes -= bytesRead; + + if (remainingBytes == 0 || inf.IsFinished) { + break; + } + + if ( inf.IsNeedingInput ) { + Fill(); + } + else if ( bytesRead == 0 ) { + throw new ZipException("Dont know what to do"); + } + } + return count - remainingBytes; + } + #endregion + + #region Instance Fields + /// + /// Decompressor for this stream + /// + protected Inflater inf; + + /// + /// Input buffer for this stream. 
+ /// + protected InflaterInputBuffer inputBuffer; + + /// + /// Base stream the inflater reads from. + /// + private Stream baseInputStream; + + /// + /// The compressed size + /// + protected long csize; + + /// + /// Flag indicating wether this instance has been closed or not. + /// + bool isClosed; + + /// + /// Flag indicating wether this instance is designated the stream owner. + /// When closing if this flag is true the underlying stream is closed. + /// + bool isStreamOwner = true; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/OutputWindow.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/OutputWindow.cs new file mode 100644 index 000000000..9114d0ca9 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/OutputWindow.cs @@ -0,0 +1,235 @@ +// OutputWindow.cs +// +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams +{ + + /// + /// Contains the output from the Inflation process. + /// We need to have a window so that we can refer backwards into the output stream + /// to repeat stuff.
+ /// Author of the original java version : John Leuner + ///
+ public class OutputWindow + { + #region Constants + const int WindowSize = 1 << 15; + const int WindowMask = WindowSize - 1; + #endregion + + #region Instance Fields + byte[] window = new byte[WindowSize]; //The window is 2^15 bytes + int windowEnd; + int windowFilled; + #endregion + + /// + /// Write a byte to this output window + /// + /// value to write + /// + /// if window is full + /// + public void Write(int value) + { + if (windowFilled++ == WindowSize) { + throw new InvalidOperationException("Window full"); + } + window[windowEnd++] = (byte) value; + windowEnd &= WindowMask; + } + + + private void SlowRepeat(int repStart, int length, int distance) + { + while (length-- > 0) { + window[windowEnd++] = window[repStart++]; + windowEnd &= WindowMask; + repStart &= WindowMask; + } + } + + /// + /// Append a byte pattern already in the window itself + /// + /// length of pattern to copy + /// distance from end of window pattern occurs + /// + /// If the repeated data overflows the window + /// + public void Repeat(int length, int distance) + { + if ((windowFilled += length) > WindowSize) { + throw new InvalidOperationException("Window full"); + } + + int repStart = (windowEnd - distance) & WindowMask; + int border = WindowSize - length; + if ( (repStart <= border) && (windowEnd < border) ) { + if (length <= distance) { + System.Array.Copy(window, repStart, window, windowEnd, length); + windowEnd += length; + } else { + // We have to copy manually, since the repeat pattern overlaps. 
+ while (length-- > 0) { + window[windowEnd++] = window[repStart++]; + } + } + } else { + SlowRepeat(repStart, length, distance); + } + } + + /// + /// Copy from input manipulator to internal window + /// + /// source of data + /// length of data to copy + /// the number of bytes copied + public int CopyStored(StreamManipulator input, int length) + { + length = Math.Min(Math.Min(length, WindowSize - windowFilled), input.AvailableBytes); + int copied; + + int tailLen = WindowSize - windowEnd; + if (length > tailLen) { + copied = input.CopyBytes(window, windowEnd, tailLen); + if (copied == tailLen) { + copied += input.CopyBytes(window, 0, length - tailLen); + } + } else { + copied = input.CopyBytes(window, windowEnd, length); + } + + windowEnd = (windowEnd + copied) & WindowMask; + windowFilled += copied; + return copied; + } + + /// + /// Copy dictionary to window + /// + /// source dictionary + /// offset of start in source dictionary + /// length of dictionary + /// + /// If window isnt empty + /// + public void CopyDict(byte[] dictionary, int offset, int length) + { + if ( dictionary == null ) { + throw new ArgumentNullException("dictionary"); + } + + if (windowFilled > 0) { + throw new InvalidOperationException(); + } + + if (length > WindowSize) { + offset += length - WindowSize; + length = WindowSize; + } + System.Array.Copy(dictionary, offset, window, 0, length); + windowEnd = length & WindowMask; + } + + /// + /// Get remaining unfilled space in window + /// + /// Number of bytes left in window + public int GetFreeSpace() + { + return WindowSize - windowFilled; + } + + /// + /// Get bytes available for output in window + /// + /// Number of bytes filled + public int GetAvailable() + { + return windowFilled; + } + + /// + /// Copy contents of window to output + /// + /// buffer to copy to + /// offset to start at + /// number of bytes to count + /// The number of bytes copied + /// + /// If a window underflow occurs + /// + public int CopyOutput(byte[] 
output, int offset, int len) + { + int copyEnd = windowEnd; + if (len > windowFilled) { + len = windowFilled; + } else { + copyEnd = (windowEnd - windowFilled + len) & WindowMask; + } + + int copied = len; + int tailLen = len - copyEnd; + + if (tailLen > 0) { + System.Array.Copy(window, WindowSize - tailLen, output, offset, tailLen); + offset += tailLen; + len = copyEnd; + } + System.Array.Copy(window, copyEnd - len, output, offset, len); + windowFilled -= copied; + if (windowFilled < 0) { + throw new InvalidOperationException(); + } + return copied; + } + + /// + /// Reset by clearing window so GetAvailable returns 0 + /// + public void Reset() + { + windowFilled = windowEnd = 0; + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/StreamManipulator.cs b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/StreamManipulator.cs new file mode 100644 index 000000000..0a30e6228 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/Compression/Streams/StreamManipulator.cs @@ -0,0 +1,297 @@ +// StreamManipulator.cs +// +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
+// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams +{ + + /// + /// This class allows us to retrieve a specified number of bits from + /// the input buffer, as well as copy big byte blocks. + /// + /// It uses an int buffer to store up to 31 bits for direct + /// manipulation. This guarantees that we can get at least 16 bits, + /// but we only need at most 15, so this is all safe. + /// + /// There are some optimizations in this class, for example, you must + /// never peek more than 8 bits more than needed, and you must first + /// peek bits before you may drop them. This is not a general purpose + /// class but optimized for the behaviour of the Inflater. 
+ /// + /// authors of the original java version : John Leuner, Jochen Hoenicke + /// + public class StreamManipulator + { + #region Constructors + /// + /// Constructs a default StreamManipulator with all buffers empty + /// + public StreamManipulator() + { + } + #endregion + + /// + /// Get the next sequence of bits but don't increase input pointer. bitCount must be + /// less or equal 16 and if this call succeeds, you must drop + /// at least n - 8 bits in the next call. + /// + /// The number of bits to peek. + /// + /// the value of the bits, or -1 if not enough bits available. */ + /// + public int PeekBits(int bitCount) + { + if (bitsInBuffer_ < bitCount) { + if (windowStart_ == windowEnd_) { + return -1; // ok + } + buffer_ |= (uint)((window_[windowStart_++] & 0xff | + (window_[windowStart_++] & 0xff) << 8) << bitsInBuffer_); + bitsInBuffer_ += 16; + } + return (int)(buffer_ & ((1 << bitCount) - 1)); + } + + /// + /// Drops the next n bits from the input. You should have called PeekBits + /// with a bigger or equal n before, to make sure that enough bits are in + /// the bit buffer. + /// + /// The number of bits to drop. + public void DropBits(int bitCount) + { + buffer_ >>= bitCount; + bitsInBuffer_ -= bitCount; + } + + /// + /// Gets the next n bits and increases input pointer. This is equivalent + /// to followed by , except for correct error handling. + /// + /// The number of bits to retrieve. + /// + /// the value of the bits, or -1 if not enough bits available. + /// + public int GetBits(int bitCount) + { + int bits = PeekBits(bitCount); + if (bits >= 0) { + DropBits(bitCount); + } + return bits; + } + + /// + /// Gets the number of bits available in the bit buffer. This must be + /// only called when a previous PeekBits() returned -1. + /// + /// + /// the number of bits available. + /// + public int AvailableBits { + get { + return bitsInBuffer_; + } + } + + /// + /// Gets the number of bytes available. 
+ /// + /// + /// The number of bytes available. + /// + public int AvailableBytes { + get { + return windowEnd_ - windowStart_ + (bitsInBuffer_ >> 3); + } + } + + /// + /// Skips to the next byte boundary. + /// + public void SkipToByteBoundary() + { + buffer_ >>= (bitsInBuffer_ & 7); + bitsInBuffer_ &= ~7; + } + + /// + /// Returns true when SetInput can be called + /// + public bool IsNeedingInput { + get { + return windowStart_ == windowEnd_; + } + } + + /// + /// Copies bytes from input buffer to output buffer starting + /// at output[offset]. You have to make sure, that the buffer is + /// byte aligned. If not enough bytes are available, copies fewer + /// bytes. + /// + /// + /// The buffer to copy bytes to. + /// + /// + /// The offset in the buffer at which copying starts + /// + /// + /// The length to copy, 0 is allowed. + /// + /// + /// The number of bytes copied, 0 if no bytes were available. + /// + /// + /// Length is less than zero + /// + /// + /// Bit buffer isnt byte aligned + /// + public int CopyBytes(byte[] output, int offset, int length) + { + if (length < 0) { + throw new ArgumentOutOfRangeException("length"); + } + + if ((bitsInBuffer_ & 7) != 0) { + // bits_in_buffer may only be 0 or a multiple of 8 + throw new InvalidOperationException("Bit buffer is not byte aligned!"); + } + + int count = 0; + while ((bitsInBuffer_ > 0) && (length > 0)) { + output[offset++] = (byte) buffer_; + buffer_ >>= 8; + bitsInBuffer_ -= 8; + length--; + count++; + } + + if (length == 0) { + return count; + } + + int avail = windowEnd_ - windowStart_; + if (length > avail) { + length = avail; + } + System.Array.Copy(window_, windowStart_, output, offset, length); + windowStart_ += length; + + if (((windowStart_ - windowEnd_) & 1) != 0) { + // We always want an even number of bytes in input, see peekBits + buffer_ = (uint)(window_[windowStart_++] & 0xff); + bitsInBuffer_ = 8; + } + return count + length; + } + + /// + /// Resets state and empties internal buffers 
+ /// + public void Reset() + { + buffer_ = 0; + windowStart_ = windowEnd_ = bitsInBuffer_ = 0; + } + + /// + /// Add more input for consumption. + /// Only call when IsNeedingInput returns true + /// + /// data to be input + /// offset of first byte of input + /// number of bytes of input to add. + public void SetInput(byte[] buffer, int offset, int count) + { + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + if ( offset < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "Cannot be negative"); +#endif + } + + if ( count < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "Cannot be negative"); +#endif + } + + if (windowStart_ < windowEnd_) { + throw new InvalidOperationException("Old input was not completely processed"); + } + + int end = offset + count; + + // We want to throw an ArrayIndexOutOfBoundsException early. + // Note the check also handles integer wrap around. 
+ if ((offset > end) || (end > buffer.Length) ) { + throw new ArgumentOutOfRangeException("count"); + } + + if ((count & 1) != 0) { + // We always want an even number of bytes in input, see PeekBits + buffer_ |= (uint)((buffer[offset++] & 0xff) << bitsInBuffer_); + bitsInBuffer_ += 8; + } + + window_ = buffer; + windowStart_ = offset; + windowEnd_ = end; + } + + #region Instance Fields + private byte[] window_; + private int windowStart_; + private int windowEnd_; + + private uint buffer_; + private int bitsInBuffer_; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/FastZip.cs b/src/GitHub.Api/SharpZipLib/Zip/FastZip.cs new file mode 100644 index 000000000..97c063ebd --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/FastZip.cs @@ -0,0 +1,729 @@ +// FastZip.cs +// +// Copyright 2005 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; +using GitHub.ICSharpCode.SharpZipLib.Core; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// FastZipEvents supports all events applicable to FastZip operations. + /// + public class FastZipEvents + { + /// + /// Delegate to invoke when processing directories. + /// + public ProcessDirectoryHandler ProcessDirectory; + + /// + /// Delegate to invoke when processing files. + /// + public ProcessFileHandler ProcessFile; + + /// + /// Delegate to invoke during processing of files. + /// + public ProgressHandler Progress; + + /// + /// Delegate to invoke when processing for a file has been completed. + /// + public CompletedFileHandler CompletedFile; + + /// + /// Delegate to invoke when processing directory failures. + /// + public DirectoryFailureHandler DirectoryFailure; + + /// + /// Delegate to invoke when processing file failures. + /// + public FileFailureHandler FileFailure; + + /// + /// Raise the directory failure event. + /// + /// The directory causing the failure. + /// The exception for this event. + /// A boolean indicating if execution should continue or not. 
+ public bool OnDirectoryFailure(string directory, Exception e) + { + bool result = false; + DirectoryFailureHandler handler = DirectoryFailure; + + if ( handler != null ) { + ScanFailureEventArgs args = new ScanFailureEventArgs(directory, e); + handler(this, args); + result = args.ContinueRunning; + } + return result; + } + + /// + /// Fires the file failure handler delegate. + /// + /// The file causing the failure. + /// The exception for this failure. + /// A boolean indicating if execution should continue or not. + public bool OnFileFailure(string file, Exception e) + { + FileFailureHandler handler = FileFailure; + bool result = (handler != null); + + if ( result ) { + ScanFailureEventArgs args = new ScanFailureEventArgs(file, e); + handler(this, args); + result = args.ContinueRunning; + } + return result; + } + + /// + /// Fires the ProcessFile delegate. + /// + /// The file being processed. + /// A boolean indicating if execution should continue or not. + public bool OnProcessFile(string file) + { + bool result = true; + ProcessFileHandler handler = ProcessFile; + + if ( handler != null ) { + ScanEventArgs args = new ScanEventArgs(file); + handler(this, args); + result = args.ContinueRunning; + } + return result; + } + + /// + /// Fires the delegate + /// + /// The file whose processing has been completed. + /// A boolean indicating if execution should continue or not. + public bool OnCompletedFile(string file) + { + bool result = true; + CompletedFileHandler handler = CompletedFile; + if ( handler != null ) { + ScanEventArgs args = new ScanEventArgs(file); + handler(this, args); + result = args.ContinueRunning; + } + return result; + } + + /// + /// Fires the process directory delegate. + /// + /// The directory being processed. + /// Flag indicating if the directory has matching files as determined by the current filter. + /// A of true if the operation should continue; false otherwise. 
+ public bool OnProcessDirectory(string directory, bool hasMatchingFiles) + { + bool result = true; + ProcessDirectoryHandler handler = ProcessDirectory; + if ( handler != null ) { + DirectoryEventArgs args = new DirectoryEventArgs(directory, hasMatchingFiles); + handler(this, args); + result = args.ContinueRunning; + } + return result; + } + + /// + /// The minimum timespan between events. + /// + /// The minimum period of time between events. + /// + /// The default interval is three seconds. + public TimeSpan ProgressInterval + { + get { return progressInterval_; } + set { progressInterval_ = value; } + } + + #region Instance Fields + TimeSpan progressInterval_ = TimeSpan.FromSeconds(3); + #endregion + } + + /// + /// FastZip provides facilities for creating and extracting zip files. + /// + public class FastZip + { + #region Enumerations + /// + /// Defines the desired handling when overwriting files during extraction. + /// + public enum Overwrite + { + /// + /// Prompt the user to confirm overwriting + /// + Prompt, + /// + /// Never overwrite files. + /// + Never, + /// + /// Always overwrite files. + /// + Always + } + #endregion + + #region Constructors + /// + /// Initialise a default instance of . + /// + public FastZip() + { + } + + /// + /// Initialise a new instance of + /// + /// The events to use during operations. + public FastZip(FastZipEvents events) + { + events_ = events; + } + #endregion + + #region Properties + /// + /// Get/set a value indicating wether empty directories should be created. + /// + public bool CreateEmptyDirectories + { + get { return createEmptyDirectories_; } + set { createEmptyDirectories_ = value; } + } + +#if !NETCF_1_0 + /// + /// Get / set the password value. + /// + public string Password + { + get { return password_; } + set { password_ = value; } + } +#endif + + /// + /// Get or set the active when creating Zip files. 
+ /// + /// + public INameTransform NameTransform + { + get { return entryFactory_.NameTransform; } + set { + entryFactory_.NameTransform = value; + } + } + + /// + /// Get or set the active when creating Zip files. + /// + public IEntryFactory EntryFactory + { + get { return entryFactory_; } + set { + if ( value == null ) { + entryFactory_ = new ZipEntryFactory(); + } + else { + entryFactory_ = value; + } + } + } + + /// + /// Gets or sets the setting for Zip64 handling when writing. + /// + /// + /// The default value is dynamic which is not backwards compatible with old + /// programs and can cause problems with XP's built in compression which cant + /// read Zip64 archives. However it does avoid the situation were a large file + /// is added and cannot be completed correctly. + /// NOTE: Setting the size for entries before they are added is the best solution! + /// By default the EntryFactory used by FastZip will set fhe file size. + /// + public UseZip64 UseZip64 + { + get { return useZip64_; } + set { useZip64_ = value; } + } + + /// + /// Get/set a value indicating wether file dates and times should + /// be restored when extracting files from an archive. + /// + /// The default value is false. + public bool RestoreDateTimeOnExtract + { + get { + return restoreDateTimeOnExtract_; + } + set { + restoreDateTimeOnExtract_ = value; + } + } + + /// + /// Get/set a value indicating wether file attributes should + /// be restored during extract operations + /// + public bool RestoreAttributesOnExtract + { + get { return restoreAttributesOnExtract_; } + set { restoreAttributesOnExtract_ = value; } + } + #endregion + + #region Delegates + /// + /// Delegate called when confirming overwriting of files. + /// + public delegate bool ConfirmOverwriteDelegate(string fileName); + #endregion + + #region CreateZip + /// + /// Create a zip file. + /// + /// The name of the zip file to create. + /// The directory to source files from. 
+ /// True to recurse directories, false for no recursion. + /// The file filter to apply. + /// The directory filter to apply. + public void CreateZip(string zipFileName, string sourceDirectory, + bool recurse, string fileFilter, string directoryFilter) + { + CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, directoryFilter); + } + + /// + /// Create a zip file/archive. + /// + /// The name of the zip file to create. + /// The directory to obtain files and directories from. + /// True to recurse directories, false for no recursion. + /// The file filter to apply. + public void CreateZip(string zipFileName, string sourceDirectory, bool recurse, string fileFilter) + { + CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, null); + } + + /// + /// Create a zip archive sending output to the passed. + /// + /// The stream to write archive data to. + /// The directory to source files from. + /// True to recurse directories, false for no recursion. + /// The file filter to apply. + /// The directory filter to apply. + /// The is closed after creation. 
+ public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, string fileFilter, string directoryFilter) + { + NameTransform = new ZipNameTransform(sourceDirectory); + sourceDirectory_ = sourceDirectory; + + using ( outputStream_ = new ZipOutputStream(outputStream) ) { + +#if !NETCF_1_0 + if ( password_ != null ) { + outputStream_.Password = password_; + } +#endif + + outputStream_.UseZip64 = UseZip64; + FileSystemScanner scanner = new FileSystemScanner(fileFilter, directoryFilter); + scanner.ProcessFile += new ProcessFileHandler(ProcessFile); + if ( this.CreateEmptyDirectories ) { + scanner.ProcessDirectory += new ProcessDirectoryHandler(ProcessDirectory); + } + + if (events_ != null) { + if ( events_.FileFailure != null ) { + scanner.FileFailure += events_.FileFailure; + } + + if ( events_.DirectoryFailure != null ) { + scanner.DirectoryFailure += events_.DirectoryFailure; + } + } + + scanner.Scan(sourceDirectory, recurse); + } + } + + #endregion + + #region ExtractZip + /// + /// Extract the contents of a zip file. + /// + /// The zip file to extract from. + /// The directory to save extracted information in. + /// A filter to apply to files. + public void ExtractZip(string zipFileName, string targetDirectory, string fileFilter) + { + ExtractZip(zipFileName, targetDirectory, Overwrite.Always, null, fileFilter, null, restoreDateTimeOnExtract_); + } + + /// + /// Extract the contents of a zip file. + /// + /// The zip file to extract from. + /// The directory to save extracted information in. + /// The style of overwriting to apply. + /// A delegate to invoke when confirming overwriting. + /// A filter to apply to files. + /// A filter to apply to directories. + /// Flag indicating whether to restore the date and time for extracted files. 
+ public void ExtractZip(string zipFileName, string targetDirectory, + Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate, + string fileFilter, string directoryFilter, bool restoreDateTime) + { + Stream inputStream = File.Open(zipFileName, FileMode.Open, FileAccess.Read, FileShare.Read); + ExtractZip(inputStream, targetDirectory, overwrite, confirmDelegate, fileFilter, directoryFilter, restoreDateTime, true); + } + + /// + /// Extract the contents of a zip file held in a stream. + /// + /// The seekable input stream containing the zip to extract from. + /// The directory to save extracted information in. + /// The style of overwriting to apply. + /// A delegate to invoke when confirming overwriting. + /// A filter to apply to files. + /// A filter to apply to directories. + /// Flag indicating whether to restore the date and time for extracted files. + /// Flag indicating whether the inputStream will be closed by this method. + public void ExtractZip(Stream inputStream, string targetDirectory, + Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate, + string fileFilter, string directoryFilter, bool restoreDateTime, + bool isStreamOwner) + { + if ((overwrite == Overwrite.Prompt) && (confirmDelegate == null)) { + throw new ArgumentNullException("confirmDelegate"); + } + + continueRunning_ = true; + overwrite_ = overwrite; + confirmDelegate_ = confirmDelegate; + extractNameTransform_ = new WindowsNameTransform(targetDirectory); + + fileFilter_ = new NameFilter(fileFilter); + directoryFilter_ = new NameFilter(directoryFilter); + restoreDateTimeOnExtract_ = restoreDateTime; + + using (zipFile_ = new ZipFile(inputStream)) { + +#if !NETCF_1_0 + if (password_ != null) { + zipFile_.Password = password_; + } +#endif + zipFile_.IsStreamOwner = isStreamOwner; + System.Collections.IEnumerator enumerator = zipFile_.GetEnumerator(); + while (continueRunning_ && enumerator.MoveNext()) { + ZipEntry entry = (ZipEntry)enumerator.Current; + if (entry.IsFile) + { + 
// TODO Path.GetDirectory can fail here on invalid characters. + if (directoryFilter_.IsMatch(Path.GetDirectoryName(entry.Name)) && fileFilter_.IsMatch(entry.Name)) { + ExtractEntry(entry); + } + } + else if (entry.IsDirectory) { + if (directoryFilter_.IsMatch(entry.Name) && CreateEmptyDirectories) { + ExtractEntry(entry); + } + } + else { + // Do nothing for volume labels etc... + } + } + } + } + #endregion + + #region Internal Processing + void ProcessDirectory(object sender, DirectoryEventArgs e) + { + if ( !e.HasMatchingFiles && CreateEmptyDirectories ) { + if ( events_ != null ) { + events_.OnProcessDirectory(e.Name, e.HasMatchingFiles); + } + + if ( e.ContinueRunning ) { + if (e.Name != sourceDirectory_) { + ZipEntry entry = entryFactory_.MakeDirectoryEntry(e.Name); + outputStream_.PutNextEntry(entry); + } + } + } + } + + void ProcessFile(object sender, ScanEventArgs e) + { + if ( (events_ != null) && (events_.ProcessFile != null) ) { + events_.ProcessFile(sender, e); + } + + if ( e.ContinueRunning ) { + try { + // The open below is equivalent to OpenRead which gaurantees that if opened the + // file will not be changed by subsequent openers, but precludes opening in some cases + // were it could succeed. 
+ using (FileStream stream = File.Open(e.Name, FileMode.Open, FileAccess.Read, FileShare.Read)) { + ZipEntry entry = entryFactory_.MakeFileEntry(e.Name); + outputStream_.PutNextEntry(entry); + AddFileContents(e.Name, stream); + } + } + catch(Exception ex) { + if (events_ != null) { + continueRunning_ = events_.OnFileFailure(e.Name, ex); + } + else { + continueRunning_ = false; + throw; + } + } + } + } + + void AddFileContents(string name, Stream stream) + { + if( stream==null ) { + throw new ArgumentNullException("stream"); + } + + if( buffer_==null ) { + buffer_=new byte[4096]; + } + + if( (events_!=null)&&(events_.Progress!=null) ) { + StreamUtils.Copy(stream, outputStream_, buffer_, + events_.Progress, events_.ProgressInterval, this, name); + } + else { + StreamUtils.Copy(stream, outputStream_, buffer_); + } + + if( events_!=null ) { + continueRunning_=events_.OnCompletedFile(name); + } + } + + void ExtractFileEntry(ZipEntry entry, string targetName) + { + bool proceed = true; + if ( overwrite_ != Overwrite.Always ) { + if ( File.Exists(targetName) ) { + if ( (overwrite_ == Overwrite.Prompt) && (confirmDelegate_ != null) ) { + proceed = confirmDelegate_(targetName); + } + else { + proceed = false; + } + } + } + + if ( proceed ) { + if ( events_ != null ) { + continueRunning_ = events_.OnProcessFile(entry.Name); + } + + if ( continueRunning_ ) { + try { + using ( FileStream outputStream = File.Create(targetName) ) { + if ( buffer_ == null ) { + buffer_ = new byte[4096]; + } + if ((events_ != null) && (events_.Progress != null)) + { + StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_, + events_.Progress, events_.ProgressInterval, this, entry.Name, entry.Size); + } + else + { + StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_); + } + + if (events_ != null) { + continueRunning_ = events_.OnCompletedFile(entry.Name); + } + } + +#if !NETCF_1_0 && !NETCF_2_0 + if ( restoreDateTimeOnExtract_ ) { + 
File.SetLastWriteTime(targetName, entry.DateTime); + } + + if ( RestoreAttributesOnExtract && entry.IsDOSEntry && (entry.ExternalFileAttributes != -1)) { + FileAttributes fileAttributes = (FileAttributes) entry.ExternalFileAttributes; + // TODO: FastZip - Setting of other file attributes on extraction is a little trickier. + fileAttributes &= (FileAttributes.Archive | FileAttributes.Normal | FileAttributes.ReadOnly | FileAttributes.Hidden); + File.SetAttributes(targetName, fileAttributes); + } +#endif + } + catch(Exception ex) { + if ( events_ != null ) { + continueRunning_ = events_.OnFileFailure(targetName, ex); + } + else { + continueRunning_ = false; + throw; + } + } + } + } + } + + void ExtractEntry(ZipEntry entry) + { + bool doExtraction = entry.IsCompressionMethodSupported(); + string targetName = entry.Name; + + if ( doExtraction ) { + if ( entry.IsFile ) { + targetName = extractNameTransform_.TransformFile(targetName); + } + else if ( entry.IsDirectory ) { + targetName = extractNameTransform_.TransformDirectory(targetName); + } + + doExtraction = !((targetName == null) || (targetName.Length == 0)); + } + + // TODO: Fire delegate/throw exception were compression method not supported, or name is invalid? 
+ + string dirName = null; + + if ( doExtraction ) { + if ( entry.IsDirectory ) { + dirName = targetName; + } + else { + dirName = Path.GetDirectoryName(Path.GetFullPath(targetName)); + } + } + + if ( doExtraction && !Directory.Exists(dirName) ) { + if ( !entry.IsDirectory || CreateEmptyDirectories ) { + try { + Directory.CreateDirectory(dirName); + } + catch (Exception ex) { + doExtraction = false; + if ( events_ != null ) { + if ( entry.IsDirectory ) { + continueRunning_ = events_.OnDirectoryFailure(targetName, ex); + } + else { + continueRunning_ = events_.OnFileFailure(targetName, ex); + } + } + else { + continueRunning_ = false; + throw; + } + } + } + } + + if ( doExtraction && entry.IsFile ) { + ExtractFileEntry(entry, targetName); + } + } + + static int MakeExternalAttributes(FileInfo info) + { + return (int)info.Attributes; + } + +#if NET_1_0 || NET_1_1 || NETCF_1_0 + static bool NameIsValid(string name) + { + return (name != null) && + (name.Length > 0) && + (name.IndexOfAny(Path.InvalidPathChars) < 0); + } +#else + static bool NameIsValid(string name) + { + return (name != null) && + (name.Length > 0) && + (name.IndexOfAny(Path.GetInvalidPathChars()) < 0); + } +#endif + #endregion + + #region Instance Fields + bool continueRunning_; + byte[] buffer_; + ZipOutputStream outputStream_; + ZipFile zipFile_; + string sourceDirectory_; + NameFilter fileFilter_; + NameFilter directoryFilter_; + Overwrite overwrite_; + ConfirmOverwriteDelegate confirmDelegate_; + + bool restoreDateTimeOnExtract_; + bool restoreAttributesOnExtract_; + bool createEmptyDirectories_; + FastZipEvents events_; + IEntryFactory entryFactory_ = new ZipEntryFactory(); + INameTransform extractNameTransform_; + UseZip64 useZip64_=UseZip64.Dynamic; + +#if !NETCF_1_0 + string password_; +#endif + + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/IEntryFactory.cs b/src/GitHub.Api/SharpZipLib/Zip/IEntryFactory.cs new file mode 100644 index 000000000..31c6f40d3 --- /dev/null +++ 
b/src/GitHub.Api/SharpZipLib/Zip/IEntryFactory.cs @@ -0,0 +1,82 @@ +// IEntryFactory.cs +// +// Copyright 2006 John Reilly +// +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. 
+ +using GitHub.ICSharpCode.SharpZipLib.Core; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// Defines factory methods for creating new values. + /// + public interface IEntryFactory + { + /// + /// Create a for a file given its name + /// + /// The name of the file to create an entry for. + /// Returns a file entry based on the passed. + ZipEntry MakeFileEntry(string fileName); + + /// + /// Create a for a file given its name + /// + /// The name of the file to create an entry for. + /// If true get details from the file system if the file exists. + /// Returns a file entry based on the passed. + ZipEntry MakeFileEntry(string fileName, bool useFileSystem); + + /// + /// Create a for a directory given its name + /// + /// The name of the directory to create an entry for. + /// Returns a directory entry based on the passed. + ZipEntry MakeDirectoryEntry(string directoryName); + + /// + /// Create a for a directory given its name + /// + /// The name of the directory to create an entry for. + /// If true get details from the file system for this directory if it exists. + /// Returns a directory entry based on the passed. + ZipEntry MakeDirectoryEntry(string directoryName, bool useFileSystem); + + /// + /// Get/set the applicable. + /// + INameTransform NameTransform { get; set; } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/WindowsNameTransform.cs b/src/GitHub.Api/SharpZipLib/Zip/WindowsNameTransform.cs new file mode 100644 index 000000000..0ab439375 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/WindowsNameTransform.cs @@ -0,0 +1,272 @@ +// WindowsNameTransform.cs +// +// Copyright 2007 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; +using System.Text; + +using GitHub.ICSharpCode.SharpZipLib.Core; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// WindowsNameTransform transforms names to windows compatible ones. 
+ /// + public class WindowsNameTransform : INameTransform + { + /// + /// Initialises a new instance of + /// + /// + public WindowsNameTransform(string baseDirectory) + { + if ( baseDirectory == null ) { + throw new ArgumentNullException("baseDirectory", "Directory name is invalid"); + } + + BaseDirectory = baseDirectory; + } + + /// + /// Initialise a default instance of + /// + public WindowsNameTransform() + { + // Do nothing. + } + + /// + /// Gets or sets a value containing the target directory to prefix values with. + /// + public string BaseDirectory + { + get { return _baseDirectory; } + set { + if ( value == null ) { + throw new ArgumentNullException("value"); + } + + _baseDirectory = Path.GetFullPath(value); + } + } + + /// + /// Gets or sets a value indicating wether paths on incoming values should be removed. + /// + public bool TrimIncomingPaths + { + get { return _trimIncomingPaths; } + set { _trimIncomingPaths = value; } + } + + /// + /// Transform a Zip directory name to a windows directory name. + /// + /// The directory name to transform. + /// The transformed name. + public string TransformDirectory(string name) + { + name = TransformFile(name); + if (name.Length > 0) { + while ( name.EndsWith(@"\") ) { + name = name.Remove(name.Length - 1, 1); + } + } + else { + throw new ZipException("Cannot have an empty directory name"); + } + return name; + } + + /// + /// Transform a Zip format file name to a windows style one. + /// + /// The file name to transform. + /// The transformed name. + public string TransformFile(string name) + { + if (name != null) { + name = MakeValidName(name, _replacementChar); + + if ( _trimIncomingPaths ) { + name = Path.GetFileName(name); + } + + // This may exceed windows length restrictions. + // Combine will throw a PathTooLongException in that case. 
+ if ( _baseDirectory != null ) { + name = Path.Combine(_baseDirectory, name); + } + } + else { + name = string.Empty; + } + return name; + } + + /// + /// Test a name to see if it is a valid name for a windows filename as extracted from a Zip archive. + /// + /// The name to test. + /// Returns true if the name is a valid zip name; false otherwise. + /// The filename isnt a true windows path in some fundamental ways like no absolute paths, no rooted paths etc. + public static bool IsValidName(string name) + { + bool result = + (name != null) && + (name.Length <= MaxPath) && + (string.Compare(name, MakeValidName(name, '_')) == 0) + ; + + return result; + } + + /// + /// Initialise static class information. + /// + static WindowsNameTransform() + { + char[] invalidPathChars; + +#if NET_1_0 || NET_1_1 || NETCF_1_0 + invalidPathChars = Path.InvalidPathChars; +#else + invalidPathChars = Path.GetInvalidPathChars(); +#endif + int howMany = invalidPathChars.Length + 3; + + InvalidEntryChars = new char[howMany]; + Array.Copy(invalidPathChars, 0, InvalidEntryChars, 0, invalidPathChars.Length); + InvalidEntryChars[howMany - 1] = '*'; + InvalidEntryChars[howMany - 2] = '?'; + InvalidEntryChars[howMany - 3] = ':'; + } + + /// + /// Force a name to be valid by replacing invalid characters with a fixed value + /// + /// The name to make valid + /// The replacement character to use for any invalid characters. + /// Returns a valid name + public static string MakeValidName(string name, char replacement) + { + if ( name == null ) { + throw new ArgumentNullException("name"); + } + + name = WindowsPathUtils.DropPathRoot(name.Replace("/", @"\")); + + // Drop any leading slashes. + while ( (name.Length > 0) && (name[0] == '\\')) { + name = name.Remove(0, 1); + } + + // Drop any trailing slashes. 
+ while ( (name.Length > 0) && (name[name.Length - 1] == '\\')) { + name = name.Remove(name.Length - 1, 1); + } + + // Convert consecutive \\ characters to \ + int index = name.IndexOf(@"\\"); + while (index >= 0) { + name = name.Remove(index, 1); + index = name.IndexOf(@"\\"); + } + + // Convert any invalid characters using the replacement one. + index = name.IndexOfAny(InvalidEntryChars); + if (index >= 0) { + StringBuilder builder = new StringBuilder(name); + + while (index >= 0 ) { + builder[index] = replacement; + + if (index >= name.Length) { + index = -1; + } + else { + index = name.IndexOfAny(InvalidEntryChars, index + 1); + } + } + name = builder.ToString(); + } + + // Check for names greater than MaxPath characters. + // TODO: Were is CLR version of MaxPath defined? Can't find it in Environment. + if ( name.Length > MaxPath ) { + throw new PathTooLongException(); + } + + return name; + } + + /// + /// Gets or set the character to replace invalid characters during transformations. + /// + public char Replacement + { + get { return _replacementChar; } + set { + for ( int i = 0; i < InvalidEntryChars.Length; ++i ) { + if ( InvalidEntryChars[i] == value ) { + throw new ArgumentException("invalid path character"); + } + } + + if ((value == '\\') || (value == '/')) { + throw new ArgumentException("invalid replacement character"); + } + + _replacementChar = value; + } + } + + /// + /// The maximum windows path name permitted. + /// + /// This may not valid for all windows systems - CE?, etc but I cant find the equivalent in the CLR. 
+ const int MaxPath = 260; + + #region Instance Fields + string _baseDirectory; + bool _trimIncomingPaths; + char _replacementChar = '_'; + #endregion + + #region Class Fields + static readonly char[] InvalidEntryChars; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipConstants.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipConstants.cs new file mode 100644 index 000000000..e544eafd3 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipConstants.cs @@ -0,0 +1,632 @@ +// ZipConstants.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 22-12-2009 DavidPierson Added AES support + +using System; +using System.Text; +using System.Threading; + +#if NETCF_1_0 || NETCF_2_0 +using System.Globalization; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + + #region Enumerations + + /// + /// Determines how entries are tested to see if they should use Zip64 extensions or not. + /// + public enum UseZip64 + { + /// + /// Zip64 will not be forced on entries during processing. + /// + /// An entry can have this overridden if required + Off, + /// + /// Zip64 should always be used. + /// + On, + /// + /// #ZipLib will determine use based on entry values when added to archive. + /// + Dynamic, + } + + /// + /// The kind of compression used for an entry in an archive + /// + public enum CompressionMethod + { + /// + /// A direct copy of the file contents is held in the archive + /// + Stored = 0, + + /// + /// Common Zip compression method using a sliding dictionary + /// of up to 32KB and secondary compression from Huffman/Shannon-Fano trees + /// + Deflated = 8, + + /// + /// An extension to deflate with a 64KB window. Not supported by #Zip currently + /// + Deflate64 = 9, + + /// + /// BZip2 compression. Not supported by #Zip. 
+ /// + BZip2 = 11, + + /// + /// WinZip special for AES encryption, Now supported by #Zip. + /// + WinZipAES = 99, + + } + + /// + /// Identifies the encryption algorithm used for an entry + /// + public enum EncryptionAlgorithm + { + /// + /// No encryption has been used. + /// + None = 0, + /// + /// Encrypted using PKZIP 2.0 or 'classic' encryption. + /// + PkzipClassic = 1, + /// + /// DES encryption has been used. + /// + Des = 0x6601, + /// + /// RCS encryption has been used for encryption. + /// + RC2 = 0x6602, + /// + /// Triple DES encryption with 168 bit keys has been used for this entry. + /// + TripleDes168 = 0x6603, + /// + /// Triple DES with 112 bit keys has been used for this entry. + /// + TripleDes112 = 0x6609, + /// + /// AES 128 has been used for encryption. + /// + Aes128 = 0x660e, + /// + /// AES 192 has been used for encryption. + /// + Aes192 = 0x660f, + /// + /// AES 256 has been used for encryption. + /// + Aes256 = 0x6610, + /// + /// RC2 corrected has been used for encryption. + /// + RC2Corrected = 0x6702, + /// + /// Blowfish has been used for encryption. + /// + Blowfish = 0x6720, + /// + /// Twofish has been used for encryption. + /// + Twofish = 0x6721, + /// + /// RC4 has been used for encryption. + /// + RC4 = 0x6801, + /// + /// An unknown algorithm has been used for encryption. + /// + Unknown = 0xffff + } + + /// + /// Defines the contents of the general bit flags field for an archive entry. 
+ /// + [Flags] + public enum GeneralBitFlags : int + { + /// + /// Bit 0 if set indicates that the file is encrypted + /// + Encrypted = 0x0001, + /// + /// Bits 1 and 2 - Two bits defining the compression method (only for Method 6 Imploding and 8,9 Deflating) + /// + Method = 0x0006, + /// + /// Bit 3 if set indicates a trailing data desciptor is appended to the entry data + /// + Descriptor = 0x0008, + /// + /// Bit 4 is reserved for use with method 8 for enhanced deflation + /// + ReservedPKware4 = 0x0010, + /// + /// Bit 5 if set indicates the file contains Pkzip compressed patched data. + /// Requires version 2.7 or greater. + /// + Patched = 0x0020, + /// + /// Bit 6 if set indicates strong encryption has been used for this entry. + /// + StrongEncryption = 0x0040, + /// + /// Bit 7 is currently unused + /// + Unused7 = 0x0080, + /// + /// Bit 8 is currently unused + /// + Unused8 = 0x0100, + /// + /// Bit 9 is currently unused + /// + Unused9 = 0x0200, + /// + /// Bit 10 is currently unused + /// + Unused10 = 0x0400, + /// + /// Bit 11 if set indicates the filename and + /// comment fields for this file must be encoded using UTF-8. + /// + UnicodeText = 0x0800, + /// + /// Bit 12 is documented as being reserved by PKware for enhanced compression. + /// + EnhancedCompress = 0x1000, + /// + /// Bit 13 if set indicates that values in the local header are masked to hide + /// their actual values, and the central directory is encrypted. + /// + /// + /// Used when encrypting the central directory contents. 
+ /// + HeaderMasked = 0x2000, + /// + /// Bit 14 is documented as being reserved for use by PKware + /// + ReservedPkware14 = 0x4000, + /// + /// Bit 15 is documented as being reserved for use by PKware + /// + ReservedPkware15 = 0x8000 + } + + #endregion + + /// + /// This class contains constants used for Zip format files + /// + public sealed class ZipConstants + { + #region Versions + /// + /// The version made by field for entries in the central header when created by this library + /// + /// + /// This is also the Zip version for the library when comparing against the version required to extract + /// for an entry. See . + /// + public const int VersionMadeBy = 51; // was 45 before AES + + /// + /// The version made by field for entries in the central header when created by this library + /// + /// + /// This is also the Zip version for the library when comparing against the version required to extract + /// for an entry. See ZipInputStream.CanDecompressEntry. + /// + [Obsolete("Use VersionMadeBy instead")] + public const int VERSION_MADE_BY = 51; + + /// + /// The minimum version required to support strong encryption + /// + public const int VersionStrongEncryption = 50; + + /// + /// The minimum version required to support strong encryption + /// + [Obsolete("Use VersionStrongEncryption instead")] + public const int VERSION_STRONG_ENCRYPTION = 50; + + /// + /// Version indicating AES encryption + /// + public const int VERSION_AES = 51; + + /// + /// The version required for Zip64 extensions (4.5 or higher) + /// + public const int VersionZip64 = 45; + #endregion + + #region Header Sizes + /// + /// Size of local entry header (excluding variable length fields at end) + /// + public const int LocalHeaderBaseSize = 30; + + /// + /// Size of local entry header (excluding variable length fields at end) + /// + [Obsolete("Use LocalHeaderBaseSize instead")] + public const int LOCHDR = 30; + + /// + /// Size of Zip64 data descriptor + /// + public const int 
Zip64DataDescriptorSize = 24; + + /// + /// Size of data descriptor + /// + public const int DataDescriptorSize = 16; + + /// + /// Size of data descriptor + /// + [Obsolete("Use DataDescriptorSize instead")] + public const int EXTHDR = 16; + + /// + /// Size of central header entry (excluding variable fields) + /// + public const int CentralHeaderBaseSize = 46; + + /// + /// Size of central header entry + /// + [Obsolete("Use CentralHeaderBaseSize instead")] + public const int CENHDR = 46; + + /// + /// Size of end of central record (excluding variable fields) + /// + public const int EndOfCentralRecordBaseSize = 22; + + /// + /// Size of end of central record (excluding variable fields) + /// + [Obsolete("Use EndOfCentralRecordBaseSize instead")] + public const int ENDHDR = 22; + + /// + /// Size of 'classic' cryptographic header stored before any entry data + /// + public const int CryptoHeaderSize = 12; + + /// + /// Size of cryptographic header stored before entry data + /// + [Obsolete("Use CryptoHeaderSize instead")] + public const int CRYPTO_HEADER_SIZE = 12; + #endregion + + #region Header Signatures + + /// + /// Signature for local entry header + /// + public const int LocalHeaderSignature = 'P' | ('K' << 8) | (3 << 16) | (4 << 24); + + /// + /// Signature for local entry header + /// + [Obsolete("Use LocalHeaderSignature instead")] + public const int LOCSIG = 'P' | ('K' << 8) | (3 << 16) | (4 << 24); + + /// + /// Signature for spanning entry + /// + public const int SpanningSignature = 'P' | ('K' << 8) | (7 << 16) | (8 << 24); + + /// + /// Signature for spanning entry + /// + [Obsolete("Use SpanningSignature instead")] + public const int SPANNINGSIG = 'P' | ('K' << 8) | (7 << 16) | (8 << 24); + + /// + /// Signature for temporary spanning entry + /// + public const int SpanningTempSignature = 'P' | ('K' << 8) | ('0' << 16) | ('0' << 24); + + /// + /// Signature for temporary spanning entry + /// + [Obsolete("Use SpanningTempSignature instead")] + 
public const int SPANTEMPSIG = 'P' | ('K' << 8) | ('0' << 16) | ('0' << 24); + + /// + /// Signature for data descriptor + /// + /// + /// This is only used where the length, Crc, or compressed size isnt known when the + /// entry is created and the output stream doesnt support seeking. + /// The local entry cannot be 'patched' with the correct values in this case + /// so the values are recorded after the data prefixed by this header, as well as in the central directory. + /// + public const int DataDescriptorSignature = 'P' | ('K' << 8) | (7 << 16) | (8 << 24); + + /// + /// Signature for data descriptor + /// + /// + /// This is only used where the length, Crc, or compressed size isnt known when the + /// entry is created and the output stream doesnt support seeking. + /// The local entry cannot be 'patched' with the correct values in this case + /// so the values are recorded after the data prefixed by this header, as well as in the central directory. + /// + [Obsolete("Use DataDescriptorSignature instead")] + public const int EXTSIG = 'P' | ('K' << 8) | (7 << 16) | (8 << 24); + + /// + /// Signature for central header + /// + [Obsolete("Use CentralHeaderSignature instead")] + public const int CENSIG = 'P' | ('K' << 8) | (1 << 16) | (2 << 24); + + /// + /// Signature for central header + /// + public const int CentralHeaderSignature = 'P' | ('K' << 8) | (1 << 16) | (2 << 24); + + /// + /// Signature for Zip64 central file header + /// + public const int Zip64CentralFileHeaderSignature = 'P' | ('K' << 8) | (6 << 16) | (6 << 24); + + /// + /// Signature for Zip64 central file header + /// + [Obsolete("Use Zip64CentralFileHeaderSignature instead")] + public const int CENSIG64 = 'P' | ('K' << 8) | (6 << 16) | (6 << 24); + + /// + /// Signature for Zip64 central directory locator + /// + public const int Zip64CentralDirLocatorSignature = 'P' | ('K' << 8) | (6 << 16) | (7 << 24); + + /// + /// Signature for archive extra data signature (were headers are encrypted). 
+ /// + public const int ArchiveExtraDataSignature = 'P' | ('K' << 8) | (6 << 16) | (7 << 24); + + /// + /// Central header digitial signature + /// + public const int CentralHeaderDigitalSignature = 'P' | ('K' << 8) | (5 << 16) | (5 << 24); + + /// + /// Central header digitial signature + /// + [Obsolete("Use CentralHeaderDigitalSignaure instead")] + public const int CENDIGITALSIG = 'P' | ('K' << 8) | (5 << 16) | (5 << 24); + + /// + /// End of central directory record signature + /// + public const int EndOfCentralDirectorySignature = 'P' | ('K' << 8) | (5 << 16) | (6 << 24); + + /// + /// End of central directory record signature + /// + [Obsolete("Use EndOfCentralDirectorySignature instead")] + public const int ENDSIG = 'P' | ('K' << 8) | (5 << 16) | (6 << 24); + #endregion + +#if NETCF_1_0 || NETCF_2_0 + // This isnt so great but is better than nothing. + // Trying to work out an appropriate OEM code page would be good. + // 850 is a good default for english speakers particularly in Europe. + static int defaultCodePage = CultureInfo.CurrentCulture.TextInfo.ANSICodePage; +#else + static int defaultCodePage = Thread.CurrentThread.CurrentCulture.TextInfo.OEMCodePage; +#endif + + /// + /// Default encoding used for string conversion. 0 gives the default system OEM code page. + /// Dont use unicode encodings if you want to be Zip compatible! + /// Using the default code page isnt the full solution neccessarily + /// there are many variable factors, codepage 850 is often a good choice for + /// European users, however be careful about compatability. + /// + public static int DefaultCodePage { + get { + return defaultCodePage; + } + set { + defaultCodePage = value; + } + } + + /// + /// Convert a portion of a byte array to a string. 
+ /// + /// + /// Data to convert to string + /// + /// + /// Number of bytes to convert starting from index 0 + /// + /// + /// data[0]..data[length - 1] converted to a string + /// + public static string ConvertToString(byte[] data, int count) + { + if ( data == null ) { + return string.Empty; + } + + return Encoding.GetEncoding(DefaultCodePage).GetString(data, 0, count); + } + + /// + /// Convert a byte array to string + /// + /// + /// Byte array to convert + /// + /// + /// dataconverted to a string + /// + public static string ConvertToString(byte[] data) + { + if ( data == null ) { + return string.Empty; + } + return ConvertToString(data, data.Length); + } + + /// + /// Convert a byte array to string + /// + /// The applicable general purpose bits flags + /// + /// Byte array to convert + /// + /// The number of bytes to convert. + /// + /// dataconverted to a string + /// + public static string ConvertToStringExt(int flags, byte[] data, int count) + { + if ( data == null ) { + return string.Empty; + } + + if ( (flags & (int)GeneralBitFlags.UnicodeText) != 0 ) { + return Encoding.UTF8.GetString(data, 0, count); + } + else { + return ConvertToString(data, count); + } + } + + /// + /// Convert a byte array to string + /// + /// + /// Byte array to convert + /// + /// The applicable general purpose bits flags + /// + /// dataconverted to a string + /// + public static string ConvertToStringExt(int flags, byte[] data) + { + if ( data == null ) { + return string.Empty; + } + + if ( (flags & (int)GeneralBitFlags.UnicodeText) != 0 ) { + return Encoding.UTF8.GetString(data, 0, data.Length); + } + else { + return ConvertToString(data, data.Length); + } + } + + /// + /// Convert a string to a byte array + /// + /// + /// String to convert to an array + /// + /// Converted array + public static byte[] ConvertToArray(string str) + { + if ( str == null ) { + return new byte[0]; + } + + return Encoding.GetEncoding(DefaultCodePage).GetBytes(str); + } + + /// + /// Convert 
a string to a byte array + /// + /// The applicable general purpose bits flags + /// + /// String to convert to an array + /// + /// Converted array + public static byte[] ConvertToArray(int flags, string str) + { + if (str == null) { + return new byte[0]; + } + + if ((flags & (int)GeneralBitFlags.UnicodeText) != 0) { + return Encoding.UTF8.GetBytes(str); + } + else { + return ConvertToArray(str); + } + } + + + /// + /// Initialise default instance of ZipConstants + /// + /// + /// Private to prevent instances being created. + /// + ZipConstants() + { + // Do nothing + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipEntry.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipEntry.cs new file mode 100644 index 000000000..b1464c035 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipEntry.cs @@ -0,0 +1,1252 @@ +// ZipEntry.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 22-12-2009 DavidPierson Added AES support +// 02-02-2010 DavidPierson Changed NTFS Extra Data min length to 4 + +using System; +using System.IO; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + + /// + /// Defines known values for the property. 
+ /// + public enum HostSystemID + { + /// + /// Host system = MSDOS + /// + Msdos = 0, + /// + /// Host system = Amiga + /// + Amiga = 1, + /// + /// Host system = Open VMS + /// + OpenVms = 2, + /// + /// Host system = Unix + /// + Unix = 3, + /// + /// Host system = VMCms + /// + VMCms = 4, + /// + /// Host system = Atari ST + /// + AtariST = 5, + /// + /// Host system = OS2 + /// + OS2 = 6, + /// + /// Host system = Macintosh + /// + Macintosh = 7, + /// + /// Host system = ZSystem + /// + ZSystem = 8, + /// + /// Host system = Cpm + /// + Cpm = 9, + /// + /// Host system = Windows NT + /// + WindowsNT = 10, + /// + /// Host system = MVS + /// + MVS = 11, + /// + /// Host system = VSE + /// + Vse = 12, + /// + /// Host system = Acorn RISC + /// + AcornRisc = 13, + /// + /// Host system = VFAT + /// + Vfat = 14, + /// + /// Host system = Alternate MVS + /// + AlternateMvs = 15, + /// + /// Host system = BEOS + /// + BeOS = 16, + /// + /// Host system = Tandem + /// + Tandem = 17, + /// + /// Host system = OS400 + /// + OS400 = 18, + /// + /// Host system = OSX + /// + OSX = 19, + /// + /// Host system = WinZIP AES + /// + WinZipAES = 99, + } + + /// + /// This class represents an entry in a zip archive. This can be a file + /// or a directory + /// ZipFile and ZipInputStream will give you instances of this class as + /// information about the members in an archive. ZipOutputStream + /// uses an instance of this class when creating an entry in a Zip file. + ///
+ ///
Author of the original java version : Jochen Hoenicke + ///
+ public class ZipEntry : ICloneable + { + [Flags] + enum Known : byte + { + None = 0, + Size = 0x01, + CompressedSize = 0x02, + Crc = 0x04, + Time = 0x08, + ExternalAttributes = 0x10, + } + + #region Constructors + /// + /// Creates a zip entry with the given name. + /// + /// + /// The name for this entry. Can include directory components. + /// The convention for names is 'unix' style paths with relative names only. + /// There are with no device names and path elements are separated by '/' characters. + /// + /// + /// The name passed is null + /// + public ZipEntry(string name) + : this(name, 0, ZipConstants.VersionMadeBy, CompressionMethod.Deflated) + { + } + + /// + /// Creates a zip entry with the given name and version required to extract + /// + /// + /// The name for this entry. Can include directory components. + /// The convention for names is 'unix' style paths with no device names and + /// path elements separated by '/' characters. This is not enforced see CleanName + /// on how to ensure names are valid if this is desired. + /// + /// + /// The minimum 'feature version' required this entry + /// + /// + /// The name passed is null + /// + internal ZipEntry(string name, int versionRequiredToExtract) + : this(name, versionRequiredToExtract, ZipConstants.VersionMadeBy, + CompressionMethod.Deflated) + { + } + + /// + /// Initializes an entry with the given name and made by information + /// + /// Name for this entry + /// Version and HostSystem Information + /// Minimum required zip feature version required to extract this entry + /// Compression method for this entry. + /// + /// The name passed is null + /// + /// + /// versionRequiredToExtract should be 0 (auto-calculate) or > 10 + /// + /// + /// This constructor is used by the ZipFile class when reading from the central header + /// It is not generally useful, use the constructor specifying the name only. 
+ /// + internal ZipEntry(string name, int versionRequiredToExtract, int madeByInfo, + CompressionMethod method) + { + if (name == null) { + throw new System.ArgumentNullException("name"); + } + + if ( name.Length > 0xffff ) { + throw new ArgumentException("Name is too long", "name"); + } + + if ( (versionRequiredToExtract != 0) && (versionRequiredToExtract < 10) ) { + throw new ArgumentOutOfRangeException("versionRequiredToExtract"); + } + + this.DateTime = System.DateTime.Now; + this.name = name; + this.versionMadeBy = (ushort)madeByInfo; + this.versionToExtract = (ushort)versionRequiredToExtract; + this.method = method; + } + + /// + /// Creates a deep copy of the given zip entry. + /// + /// + /// The entry to copy. + /// + [Obsolete("Use Clone instead")] + public ZipEntry(ZipEntry entry) + { + if ( entry == null ) { + throw new ArgumentNullException("entry"); + } + + known = entry.known; + name = entry.name; + size = entry.size; + compressedSize = entry.compressedSize; + crc = entry.crc; + dosTime = entry.dosTime; + method = entry.method; + comment = entry.comment; + versionToExtract = entry.versionToExtract; + versionMadeBy = entry.versionMadeBy; + externalFileAttributes = entry.externalFileAttributes; + flags = entry.flags; + + zipFileIndex = entry.zipFileIndex; + offset = entry.offset; + + forceZip64_ = entry.forceZip64_; + + if ( entry.extra != null ) { + extra = new byte[entry.extra.Length]; + Array.Copy(entry.extra, 0, extra, 0, entry.extra.Length); + } + } + + #endregion + + /// + /// Get a value indicating wether the entry has a CRC value available. + /// + public bool HasCrc + { + get { + return (known & Known.Crc) != 0; + } + } + + /// + /// Get/Set flag indicating if entry is encrypted. + /// A simple helper routine to aid interpretation of flags + /// + /// This is an assistant that interprets the flags property. 
+ public bool IsCrypted + { + get { + return (flags & 1) != 0; + } + set { + if (value) { + flags |= 1; + } + else { + flags &= ~1; + } + } + } + + /// + /// Get / set a flag indicating wether entry name and comment text are + /// encoded in unicode UTF8. + /// + /// This is an assistant that interprets the flags property. + public bool IsUnicodeText + { + get { + return ( flags & (int)GeneralBitFlags.UnicodeText ) != 0; + } + set { + if ( value ) { + flags |= (int)GeneralBitFlags.UnicodeText; + } + else { + flags &= ~(int)GeneralBitFlags.UnicodeText; + } + } + } + + /// + /// Value used during password checking for PKZIP 2.0 / 'classic' encryption. + /// + internal byte CryptoCheckValue + { + get { + return cryptoCheckValue_; + } + + set { + cryptoCheckValue_ = value; + } + } + + /// + /// Get/Set general purpose bit flag for entry + /// + /// + /// General purpose bit flag
+ ///
+ /// Bit 0: If set, indicates the file is encrypted
+ /// Bit 1-2 Only used for compression type 6 Imploding, and 8, 9 deflating
+ /// Imploding:
+ /// Bit 1 if set indicates an 8K sliding dictionary was used. If clear a 4k dictionary was used
+ /// Bit 2 if set indicates 3 Shannon-Fanno trees were used to encode the sliding dictionary, 2 otherwise
+ ///
+ /// Deflating:
+ /// Bit 2 Bit 1
+ /// 0 0 Normal compression was used
+ /// 0 1 Maximum compression was used
+ /// 1 0 Fast compression was used
+ /// 1 1 Super fast compression was used
+ ///
+ /// Bit 3: If set, the fields crc-32, compressed size + /// and uncompressed size are were not able to be written during zip file creation + /// The correct values are held in a data descriptor immediately following the compressed data.
+ /// Bit 4: Reserved for use by PKZIP for enhanced deflating
+ /// Bit 5: If set indicates the file contains compressed patch data
+ /// Bit 6: If set indicates strong encryption was used.
+ /// Bit 7-10: Unused or reserved
+ /// Bit 11: If set the name and comments for this entry are in unicode.
+ /// Bit 12-15: Unused or reserved
+ ///
+ /// + /// + public int Flags + { + get { + return flags; + } + set { + flags = value; + } + } + + /// + /// Get/Set index of this entry in Zip file + /// + /// This is only valid when the entry is part of a + public long ZipFileIndex + { + get { + return zipFileIndex; + } + set { + zipFileIndex = value; + } + } + + /// + /// Get/set offset for use in central header + /// + public long Offset + { + get { + return offset; + } + set { + offset = value; + } + } + + /// + /// Get/Set external file attributes as an integer. + /// The values of this are operating system dependant see + /// HostSystem for details + /// + public int ExternalFileAttributes + { + get { + if ((known & Known.ExternalAttributes) == 0) { + return -1; + } + else { + return externalFileAttributes; + } + } + + set { + externalFileAttributes = value; + known |= Known.ExternalAttributes; + } + } + + /// + /// Get the version made by for this entry or zero if unknown. + /// The value / 10 indicates the major version number, and + /// the value mod 10 is the minor version number + /// + public int VersionMadeBy + { + get { + return (versionMadeBy & 0xff); + } + } + + /// + /// Get a value indicating this entry is for a DOS/Windows system. + /// + public bool IsDOSEntry + { + get { + return ((HostSystem == ( int )HostSystemID.Msdos) || + (HostSystem == ( int )HostSystemID.WindowsNT)); + } + } + + /// + /// Test the external attributes for this to + /// see if the external attributes are Dos based (including WINNT and variants) + /// and match the values + /// + /// The attributes to test. + /// Returns true if the external attributes are known to be DOS/Windows + /// based and have the same attributes set as the value passed. 
+ bool HasDosAttributes(int attributes) + { + bool result = false; + if ( (known & Known.ExternalAttributes) != 0 ) { + if ( ((HostSystem == (int)HostSystemID.Msdos) || + (HostSystem == (int)HostSystemID.WindowsNT)) && + (ExternalFileAttributes & attributes) == attributes) { + result = true; + } + } + return result; + } + + /// + /// Gets the compatability information for the external file attribute + /// If the external file attributes are compatible with MS-DOS and can be read + /// by PKZIP for DOS version 2.04g then this value will be zero. Otherwise the value + /// will be non-zero and identify the host system on which the attributes are compatible. + /// + /// + /// + /// The values for this as defined in the Zip File format and by others are shown below. The values are somewhat + /// misleading in some cases as they are not all used as shown. You should consult the relevant documentation + /// to obtain up to date and correct information. The modified appnote by the infozip group is + /// particularly helpful as it documents a lot of peculiarities. The document is however a little dated. + /// + /// 0 - MS-DOS and OS/2 (FAT / VFAT / FAT32 file systems) + /// 1 - Amiga + /// 2 - OpenVMS + /// 3 - Unix + /// 4 - VM/CMS + /// 5 - Atari ST + /// 6 - OS/2 HPFS + /// 7 - Macintosh + /// 8 - Z-System + /// 9 - CP/M + /// 10 - Windows NTFS + /// 11 - MVS (OS/390 - Z/OS) + /// 12 - VSE + /// 13 - Acorn Risc + /// 14 - VFAT + /// 15 - Alternate MVS + /// 16 - BeOS + /// 17 - Tandem + /// 18 - OS/400 + /// 19 - OS/X (Darwin) + /// 99 - WinZip AES + /// remainder - unused + /// + /// + public int HostSystem + { + get { + return (versionMadeBy >> 8) & 0xff; + } + + set { + versionMadeBy &= 0xff; + versionMadeBy |= (ushort)((value & 0xff) << 8); + } + } + + /// + /// Get minimum Zip feature version required to extract this entry + /// + /// + /// Minimum features are defined as:
+ /// 1.0 - Default value
+ /// 1.1 - File is a volume label
+ /// 2.0 - File is a folder/directory
+ /// 2.0 - File is compressed using Deflate compression
+ /// 2.0 - File is encrypted using traditional encryption
+ /// 2.1 - File is compressed using Deflate64
+ /// 2.5 - File is compressed using PKWARE DCL Implode
+ /// 2.7 - File is a patch data set
+ /// 4.5 - File uses Zip64 format extensions
+ /// 4.6 - File is compressed using BZIP2 compression
+ /// 5.0 - File is encrypted using DES
+ /// 5.0 - File is encrypted using 3DES
+ /// 5.0 - File is encrypted using original RC2 encryption
+ /// 5.0 - File is encrypted using RC4 encryption
+ /// 5.1 - File is encrypted using AES encryption
+ /// 5.1 - File is encrypted using corrected RC2 encryption
+ /// 5.1 - File is encrypted using corrected RC2-64 encryption
+ /// 6.1 - File is encrypted using non-OAEP key wrapping
+ /// 6.2 - Central directory encryption (not confirmed yet)
+ /// 6.3 - File is compressed using LZMA
+ /// 6.3 - File is compressed using PPMD+
+ /// 6.3 - File is encrypted using Blowfish
+ /// 6.3 - File is encrypted using Twofish
+ ///
+ /// + public int Version + { + get { + // Return recorded version if known. + if (versionToExtract != 0) { + return versionToExtract; + } + else { + int result = 10; + if (AESKeySize > 0) { + result = ZipConstants.VERSION_AES; // Ver 5.1 = AES + } + else if (CentralHeaderRequiresZip64) { + result = ZipConstants.VersionZip64; + } + else if (CompressionMethod.Deflated == method) { + result = 20; + } + else if (IsDirectory == true) { + result = 20; + } + else if (IsCrypted == true) { + result = 20; + } + else if (HasDosAttributes(0x08) ) { + result = 11; + } + return result; + } + } + } + + /// + /// Get a value indicating whether this entry can be decompressed by the library. + /// + /// This is based on the and + /// wether the compression method is supported. + public bool CanDecompress + { + get { + return (Version <= ZipConstants.VersionMadeBy) && + ((Version == 10) || + (Version == 11) || + (Version == 20) || + (Version == 45) || + (Version == 51)) && + IsCompressionMethodSupported(); + } + } + + /// + /// Force this entry to be recorded using Zip64 extensions. + /// + public void ForceZip64() + { + forceZip64_ = true; + } + + /// + /// Get a value indicating wether Zip64 extensions were forced. + /// + /// A value of true if Zip64 extensions have been forced on; false if not. + public bool IsZip64Forced() + { + return forceZip64_; + } + + /// + /// Gets a value indicating if the entry requires Zip64 extensions + /// to store the full entry values. + /// + /// A value of true if a local header requires Zip64 extensions; false if not. + public bool LocalHeaderRequiresZip64 + { + get { + bool result = forceZip64_; + + if ( !result ) { + ulong trueCompressedSize = compressedSize; + + if ( (versionToExtract == 0) && IsCrypted ) { + trueCompressedSize += ZipConstants.CryptoHeaderSize; + } + + // TODO: A better estimation of the true limit based on compression overhead should be used + // to determine when an entry should use Zip64. 
+ result = + ((this.size >= uint.MaxValue) || (trueCompressedSize >= uint.MaxValue)) && + ((versionToExtract == 0) || (versionToExtract >= ZipConstants.VersionZip64)); + } + + return result; + } + } + + /// + /// Get a value indicating wether the central directory entry requires Zip64 extensions to be stored. + /// + public bool CentralHeaderRequiresZip64 + { + get { + return LocalHeaderRequiresZip64 || (offset >= uint.MaxValue); + } + } + + /// + /// Get/Set DosTime value. + /// + /// + /// The MS-DOS date format can only represent dates between 1/1/1980 and 12/31/2107. + /// + public long DosTime + { + get { + if ((known & Known.Time) == 0) { + return 0; + } + else { + return dosTime; + } + } + + set { + unchecked { + dosTime = (uint)value; + } + + known |= Known.Time; + } + } + + /// + /// Gets/Sets the time of last modification of the entry. + /// + /// + /// The property is updated to match this as far as possible. + /// + public DateTime DateTime + { + get { + uint sec = Math.Min(59, 2 * (dosTime & 0x1f)); + uint min = Math.Min(59, (dosTime >> 5) & 0x3f); + uint hrs = Math.Min(23, (dosTime >> 11) & 0x1f); + uint mon = Math.Max(1, Math.Min(12, ((dosTime >> 21) & 0xf))); + uint year = ((dosTime >> 25) & 0x7f) + 1980; + int day = Math.Max(1, Math.Min(DateTime.DaysInMonth((int)year, (int)mon), (int)((dosTime >> 16) & 0x1f))); + return new System.DateTime((int)year, (int)mon, day, (int)hrs, (int)min, (int)sec); + } + + set { + uint year = (uint) value.Year; + uint month = (uint) value.Month; + uint day = (uint) value.Day; + uint hour = (uint) value.Hour; + uint minute = (uint) value.Minute; + uint second = (uint) value.Second; + + if ( year < 1980 ) { + year = 1980; + month = 1; + day = 1; + hour = 0; + minute = 0; + second = 0; + } + else if ( year > 2107 ) { + year = 2107; + month = 12; + day = 31; + hour = 23; + minute = 59; + second = 59; + } + + DosTime = ((year - 1980) & 0x7f) << 25 | + (month << 21) | + (day << 16) | + (hour << 11) | + (minute << 5) | + 
(second >> 1); + } + } + + /// + /// Returns the entry name. + /// + /// + /// The unix naming convention is followed. + /// Path components in the entry should always separated by forward slashes ('/'). + /// Dos device names like C: should also be removed. + /// See the class, or + /// + public string Name + { + get { + return name; + } + } + + /// + /// Gets/Sets the size of the uncompressed data. + /// + /// + /// The size or -1 if unknown. + /// + /// Setting the size before adding an entry to an archive can help + /// avoid compatability problems with some archivers which dont understand Zip64 extensions. + public long Size + { + get { + return (known & Known.Size) != 0 ? (long)size : -1L; + } + set { + this.size = (ulong)value; + this.known |= Known.Size; + } + } + + /// + /// Gets/Sets the size of the compressed data. + /// + /// + /// The compressed entry size or -1 if unknown. + /// + public long CompressedSize + { + get { + return (known & Known.CompressedSize) != 0 ? (long)compressedSize : -1L; + } + set { + this.compressedSize = (ulong)value; + this.known |= Known.CompressedSize; + } + } + + /// + /// Gets/Sets the crc of the uncompressed data. + /// + /// + /// Crc is not in the range 0..0xffffffffL + /// + /// + /// The crc value or -1 if unknown. + /// + public long Crc + { + get { + return (known & Known.Crc) != 0 ? crc & 0xffffffffL : -1L; + } + set { + if (((ulong)crc & 0xffffffff00000000L) != 0) { + throw new ArgumentOutOfRangeException("value"); + } + this.crc = (uint)value; + this.known |= Known.Crc; + } + } + + /// + /// Gets/Sets the compression method. Only Deflated and Stored are supported. 
+ /// + /// + /// The compression method for this entry + /// + /// + /// + public CompressionMethod CompressionMethod { + get { + return method; + } + + set { + if ( !IsCompressionMethodSupported(value) ) { + throw new NotSupportedException("Compression method not supported"); + } + this.method = value; + } + } + + /// + /// Gets the compression method for outputting to the local or central header. + /// Returns same value as CompressionMethod except when AES encrypting, which + /// places 99 in the method and places the real method in the extra data. + /// + internal CompressionMethod CompressionMethodForHeader { + get { + return (AESKeySize > 0) ? CompressionMethod.WinZipAES : method; + } + } + + /// + /// Gets/Sets the extra data. + /// + /// + /// Extra data is longer than 64KB (0xffff) bytes. + /// + /// + /// Extra data or null if not set. + /// + public byte[] ExtraData { + + get { +// TODO: This is slightly safer but less efficient. Think about wether it should change. +// return (byte[]) extra.Clone(); + return extra; + } + + set { + if (value == null) { + extra = null; + } + else { + if (value.Length > 0xffff) { + throw new System.ArgumentOutOfRangeException("value"); + } + + extra = new byte[value.Length]; + Array.Copy(value, 0, extra, 0, value.Length); + } + } + } + + +#if !NET_1_1 && !NETCF_2_0 + /// + /// For AES encrypted files returns or sets the number of bits of encryption (128, 192 or 256). + /// When setting, only 0 (off), 128 or 256 is supported. 
+ /// + public int AESKeySize { + get { + // the strength (1 or 3) is in the entry header + switch (_aesEncryptionStrength) { + case 0: return 0; // Not AES + case 1: return 128; + case 2: return 192; // Not used by WinZip + case 3: return 256; + default: throw new ZipException("Invalid AESEncryptionStrength " + _aesEncryptionStrength); + } + } + set { + switch (value) { + case 0: _aesEncryptionStrength = 0; break; + case 128: _aesEncryptionStrength = 1; break; + case 256: _aesEncryptionStrength = 3; break; + default: throw new ZipException("AESKeySize must be 0, 128 or 256: " + value); + } + } + } + + /// + /// AES Encryption strength for storage in extra data in entry header. + /// 1 is 128 bit, 2 is 192 bit, 3 is 256 bit. + /// + internal byte AESEncryptionStrength { + get { + return (byte)_aesEncryptionStrength; + } + } +#else + /// + /// AES unsupported prior to .NET 2.0 + /// + internal int AESKeySize; +#endif + + /// + /// Returns the length of the salt, in bytes + /// + internal int AESSaltLen { + get { + // Key size -> Salt length: 128 bits = 8 bytes, 192 bits = 12 bytes, 256 bits = 16 bytes. + return AESKeySize / 16; + } + } + + /// + /// Number of extra bytes required to hold the AES Header fields (Salt, Pwd verify, AuthCode) + /// + internal int AESOverheadSize { + get { + // File format: + // Bytes Content + // Variable Salt value + // 2 Password verification value + // Variable Encrypted file data + // 10 Authentication code + return 12 + AESSaltLen; + } + } + + /// + /// Process extra data fields updating the entry based on the contents. + /// + /// True if the extra data fields should be handled + /// for a local header, rather than for a central header. 
+ /// + internal void ProcessExtraData(bool localHeader) + { + ZipExtraData extraData = new ZipExtraData(this.extra); + + if ( extraData.Find(0x0001) ) { + // Version required to extract is ignored here as some archivers dont set it correctly + // in theory it should be version 45 or higher + + // The recorded size will change but remember that this is zip64. + forceZip64_ = true; + + if ( extraData.ValueLength < 4 ) { + throw new ZipException("Extra data extended Zip64 information length is invalid"); + } + + if ( localHeader || (size == uint.MaxValue) ) { + size = (ulong)extraData.ReadLong(); + } + + if ( localHeader || (compressedSize == uint.MaxValue) ) { + compressedSize = (ulong)extraData.ReadLong(); + } + + if ( !localHeader && (offset == uint.MaxValue) ) { + offset = extraData.ReadLong(); + } + + // Disk number on which file starts is ignored + } + else { + if ( + ((versionToExtract & 0xff) >= ZipConstants.VersionZip64) && + ((size == uint.MaxValue) || (compressedSize == uint.MaxValue)) + ) { + throw new ZipException("Zip64 Extended information required but is missing."); + } + } + + if ( extraData.Find(10) ) { + // No room for any tags. + if ( extraData.ValueLength < 4 ) { + throw new ZipException("NTFS Extra data invalid"); + } + + extraData.ReadInt(); // Reserved + + while ( extraData.UnreadCount >= 4 ) { + int ntfsTag = extraData.ReadShort(); + int ntfsLength = extraData.ReadShort(); + if ( ntfsTag == 1 ) { + if ( ntfsLength >= 24 ) { + long lastModification = extraData.ReadLong(); + long lastAccess = extraData.ReadLong(); + long createTime = extraData.ReadLong(); + + DateTime = System.DateTime.FromFileTime(lastModification); + } + break; + } + else { + // An unknown NTFS tag so simply skip it. + extraData.Skip(ntfsLength); + } + } + } + else if ( extraData.Find(0x5455) ) { + int length = extraData.ValueLength; + int flags = extraData.ReadByte(); + + // Can include other times but these are ignored. 
Length of data should + // actually be 1 + 4 * no of bits in flags. + if ( ((flags & 1) != 0) && (length >= 5) ) { + int iTime = extraData.ReadInt(); + + DateTime = (new System.DateTime ( 1970, 1, 1, 0, 0, 0 ).ToUniversalTime() + + new TimeSpan ( 0, 0, 0, iTime, 0 )).ToLocalTime(); + } + } + if (method == CompressionMethod.WinZipAES) { + ProcessAESExtraData(extraData); + } + } + + // For AES the method in the entry is 99, and the real compression method is in the extradata + // + private void ProcessAESExtraData(ZipExtraData extraData) { + +#if !NET_1_1 && !NETCF_2_0 + if (extraData.Find(0x9901)) { + // Set version and flag for Zipfile.CreateAndInitDecryptionStream + versionToExtract = ZipConstants.VERSION_AES; // Ver 5.1 = AES see "Version" getter + // Set StrongEncryption flag for ZipFile.CreateAndInitDecryptionStream + Flags = Flags | (int)GeneralBitFlags.StrongEncryption; + // + // Unpack AES extra data field see http://www.winzip.com/aes_info.htm + int length = extraData.ValueLength; // Data size currently 7 + if (length < 7) + throw new ZipException("AES Extra Data Length " + length + " invalid."); + int ver = extraData.ReadShort(); // Version number (1=AE-1 2=AE-2) + int vendorId = extraData.ReadShort(); // 2-character vendor ID 0x4541 = "AE" + int encrStrength = extraData.ReadByte(); // encryption strength 1 = 128 2 = 192 3 = 256 + int actualCompress = extraData.ReadShort(); // The actual compression method used to compress the file + _aesVer = ver; + _aesEncryptionStrength = encrStrength; + method = (CompressionMethod)actualCompress; + } else + throw new ZipException("AES Extra Data missing"); +#else + throw new ZipException("AES unsupported"); +#endif + } + + /// + /// Gets/Sets the entry comment. + /// + /// + /// If comment is longer than 0xffff. + /// + /// + /// The comment or null if not set. + /// + /// + /// A comment is only available for entries when read via the class. + /// The class doesnt have the comment data available. 
+ /// + public string Comment { + get { + return comment; + } + set { + // This test is strictly incorrect as the length is in characters + // while the storage limit is in bytes. + // While the test is partially correct in that a comment of this length or greater + // is definitely invalid, shorter comments may also have an invalid length + // where there are multi-byte characters + // The full test is not possible here however as the code page to apply conversions with + // isnt available. + if ( (value != null) && (value.Length > 0xffff) ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("value"); +#else + throw new ArgumentOutOfRangeException("value", "cannot exceed 65535"); +#endif + } + + comment = value; + } + } + + /// + /// Gets a value indicating if the entry is a directory. + /// however. + /// + /// + /// A directory is determined by an entry name with a trailing slash '/'. + /// The external file attributes can also indicate an entry is for a directory. + /// Currently only dos/windows attributes are tested in this manner. + /// The trailing slash convention should always be followed. + /// + public bool IsDirectory + { + get { + int nameLength = name.Length; + bool result = + ((nameLength > 0) && + ((name[nameLength - 1] == '/') || (name[nameLength - 1] == '\\'))) || + HasDosAttributes(16) + ; + return result; + } + } + + /// + /// Get a value of true if the entry appears to be a file; false otherwise + /// + /// + /// This only takes account of DOS/Windows attributes. Other operating systems are ignored. + /// For linux and others the result may be incorrect. + /// + public bool IsFile + { + get { + return !IsDirectory && !HasDosAttributes(8); + } + } + + /// + /// Test entry to see if data can be extracted. + /// + /// Returns true if data can be extracted for this entry; false otherwise. 
+ public bool IsCompressionMethodSupported() + { + return IsCompressionMethodSupported(CompressionMethod); + } + + #region ICloneable Members + /// + /// Creates a copy of this zip entry. + /// + /// An that is a copy of the current instance. + public object Clone() + { + ZipEntry result = (ZipEntry)this.MemberwiseClone(); + + // Ensure extra data is unique if it exists. + if ( extra != null ) { + result.extra = new byte[extra.Length]; + Array.Copy(extra, 0, result.extra, 0, extra.Length); + } + + return result; + } + + #endregion + + /// + /// Gets a string representation of this ZipEntry. + /// + /// A readable textual representation of this + public override string ToString() + { + return name; + } + + /// + /// Test a compression method to see if this library + /// supports extracting data compressed with that method + /// + /// The compression method to test. + /// Returns true if the compression method is supported; false otherwise + public static bool IsCompressionMethodSupported(CompressionMethod method) + { + return + ( method == CompressionMethod.Deflated ) || + ( method == CompressionMethod.Stored ); + } + + /// + /// Cleans a name making it conform to Zip file conventions. + /// Devices names ('c:\') and UNC share names ('\\server\share') are removed + /// and forward slashes ('\') are converted to back slashes ('/'). + /// Names are made relative by trimming leading slashes which is compatible + /// with the ZIP naming convention. + /// + /// The name to clean + /// The 'cleaned' name. + /// + /// The Zip name transform class is more flexible. + /// + public static string CleanName(string name) + { + if (name == null) { + return string.Empty; + } + + if (Path.IsPathRooted(name) == true) { + // NOTE: + // for UNC names... 
\\machine\share\zoom\beet.txt gives \zoom\beet.txt + name = name.Substring(Path.GetPathRoot(name).Length); + } + + name = name.Replace(@"\", "/"); + + while ( (name.Length > 0) && (name[0] == '/')) { + name = name.Remove(0, 1); + } + return name; + } + + #region Instance Fields + Known known; + int externalFileAttributes = -1; // contains external attributes (O/S dependant) + + ushort versionMadeBy; // Contains host system and version information + // only relevant for central header entries + + string name; + ulong size; + ulong compressedSize; + ushort versionToExtract; // Version required to extract (library handles <= 2.0) + uint crc; + uint dosTime; + + CompressionMethod method = CompressionMethod.Deflated; + byte[] extra; + string comment; + + int flags; // general purpose bit flags + + long zipFileIndex = -1; // used by ZipFile + long offset; // used by ZipFile and ZipOutputStream + + bool forceZip64_; + byte cryptoCheckValue_; +#if !NET_1_1 && !NETCF_2_0 + int _aesVer; // Version number (2 = AE-2 ?). Assigned but not used. + int _aesEncryptionStrength; // Encryption strength 1 = 128 2 = 192 3 = 256 +#endif + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipEntryFactory.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipEntryFactory.cs new file mode 100644 index 000000000..4e5e1c1e8 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipEntryFactory.cs @@ -0,0 +1,413 @@ +// ZipEntryFactory.cs +// +// Copyright 2006 John Reilly +// +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; + +using GitHub.ICSharpCode.SharpZipLib.Core; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// Basic implementation of + /// + public class ZipEntryFactory : IEntryFactory + { + #region Enumerations + /// + /// Defines the possible values to be used for the . + /// + public enum TimeSetting + { + /// + /// Use the recorded LastWriteTime value for the file. + /// + LastWriteTime, + /// + /// Use the recorded LastWriteTimeUtc value for the file + /// + LastWriteTimeUtc, + /// + /// Use the recorded CreateTime value for the file. + /// + CreateTime, + /// + /// Use the recorded CreateTimeUtc value for the file. 
+ /// + CreateTimeUtc, + /// + /// Use the recorded LastAccessTime value for the file. + /// + LastAccessTime, + /// + /// Use the recorded LastAccessTimeUtc value for the file. + /// + LastAccessTimeUtc, + /// + /// Use a fixed value. + /// + /// The actual value used can be + /// specified via the constructor or + /// using the with the setting set + /// to which will use the when this class was constructed. + /// The property can also be used to set this value. + Fixed, + } + #endregion + + #region Constructors + /// + /// Initialise a new instance of the class. + /// + /// A default , and the LastWriteTime for files is used. + public ZipEntryFactory() + { + nameTransform_ = new ZipNameTransform(); + } + + /// + /// Initialise a new instance of using the specified + /// + /// The time setting to use when creating Zip entries. + public ZipEntryFactory(TimeSetting timeSetting) + { + timeSetting_ = timeSetting; + nameTransform_ = new ZipNameTransform(); + } + + /// + /// Initialise a new instance of using the specified + /// + /// The time to set all values to. + public ZipEntryFactory(DateTime time) + { + timeSetting_ = TimeSetting.Fixed; + FixedDateTime = time; + nameTransform_ = new ZipNameTransform(); + } + + #endregion + + #region Properties + /// + /// Get / set the to be used when creating new values. + /// + /// + /// Setting this property to null will cause a default name transform to be used. + /// + public INameTransform NameTransform + { + get { return nameTransform_; } + set + { + if (value == null) { + nameTransform_ = new ZipNameTransform(); + } + else { + nameTransform_ = value; + } + } + } + + /// + /// Get / set the in use. 
+ /// + public TimeSetting Setting + { + get { return timeSetting_; } + set { timeSetting_ = value; } + } + + /// + /// Get / set the value to use when is set to + /// + public DateTime FixedDateTime + { + get { return fixedDateTime_; } + set + { + if (value.Year < 1970) { + throw new ArgumentException("Value is too old to be valid", "value"); + } + fixedDateTime_ = value; + } + } + + /// + /// A bitmask defining the attributes to be retrieved from the actual file. + /// + /// The default is to get all possible attributes from the actual file. + public int GetAttributes + { + get { return getAttributes_; } + set { getAttributes_ = value; } + } + + /// + /// A bitmask defining which attributes are to be set on. + /// + /// By default no attributes are set on. + public int SetAttributes + { + get { return setAttributes_; } + set { setAttributes_ = value; } + } + + /// + /// Get set a value indicating wether unidoce text should be set on. + /// + public bool IsUnicodeText + { + get { return isUnicodeText_; } + set { isUnicodeText_ = value; } + } + + #endregion + + #region IEntryFactory Members + + /// + /// Make a new for a file. + /// + /// The name of the file to create a new entry for. + /// Returns a new based on the . + public ZipEntry MakeFileEntry(string fileName) + { + return MakeFileEntry(fileName, true); + } + + /// + /// Make a new from a name. + /// + /// The name of the file to create a new entry for. + /// If true entry detail is retrieved from the file system if the file exists. + /// Returns a new based on the . 
+ public ZipEntry MakeFileEntry(string fileName, bool useFileSystem) + { + ZipEntry result = new ZipEntry(nameTransform_.TransformFile(fileName)); + result.IsUnicodeText = isUnicodeText_; + + int externalAttributes = 0; + bool useAttributes = (setAttributes_ != 0); + + FileInfo fi = null; + if (useFileSystem) + { + fi = new FileInfo(fileName); + } + + if ((fi != null) && fi.Exists) + { + switch (timeSetting_) + { + case TimeSetting.CreateTime: + result.DateTime = fi.CreationTime; + break; + + case TimeSetting.CreateTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = fi.CreationTime.ToUniversalTime(); +#else + result.DateTime = fi.CreationTimeUtc; +#endif + break; + + case TimeSetting.LastAccessTime: + result.DateTime = fi.LastAccessTime; + break; + + case TimeSetting.LastAccessTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = fi.LastAccessTime.ToUniversalTime(); +#else + result.DateTime = fi.LastAccessTimeUtc; +#endif + break; + + case TimeSetting.LastWriteTime: + result.DateTime = fi.LastWriteTime; + break; + + case TimeSetting.LastWriteTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = fi.LastWriteTime.ToUniversalTime(); +#else + result.DateTime = fi.LastWriteTimeUtc; +#endif + break; + + case TimeSetting.Fixed: + result.DateTime = fixedDateTime_; + break; + + default: + throw new ZipException("Unhandled time setting in MakeFileEntry"); + } + + result.Size = fi.Length; + + useAttributes = true; + externalAttributes = ((int)fi.Attributes & getAttributes_); + } + else + { + if (timeSetting_ == TimeSetting.Fixed) + { + result.DateTime = fixedDateTime_; + } + } + + if (useAttributes) + { + externalAttributes |= setAttributes_; + result.ExternalFileAttributes = externalAttributes; + } + + return result; + } + + /// + /// Make a new for a directory. + /// + /// The raw untransformed name for the new directory + /// Returns a new representing a directory. 
+ public ZipEntry MakeDirectoryEntry(string directoryName) + { + return MakeDirectoryEntry(directoryName, true); + } + + /// + /// Make a new for a directory. + /// + /// The raw untransformed name for the new directory + /// If true entry detail is retrieved from the file system if the file exists. + /// Returns a new representing a directory. + public ZipEntry MakeDirectoryEntry(string directoryName, bool useFileSystem) + { + + ZipEntry result = new ZipEntry(nameTransform_.TransformDirectory(directoryName)); + result.IsUnicodeText = isUnicodeText_; + result.Size = 0; + + int externalAttributes = 0; + + DirectoryInfo di = null; + + if (useFileSystem) + { + di = new DirectoryInfo(directoryName); + } + + + if ((di != null) && di.Exists) + { + switch (timeSetting_) + { + case TimeSetting.CreateTime: + result.DateTime = di.CreationTime; + break; + + case TimeSetting.CreateTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = di.CreationTime.ToUniversalTime(); +#else + result.DateTime = di.CreationTimeUtc; +#endif + break; + + case TimeSetting.LastAccessTime: + result.DateTime = di.LastAccessTime; + break; + + case TimeSetting.LastAccessTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = di.LastAccessTime.ToUniversalTime(); +#else + result.DateTime = di.LastAccessTimeUtc; +#endif + break; + + case TimeSetting.LastWriteTime: + result.DateTime = di.LastWriteTime; + break; + + case TimeSetting.LastWriteTimeUtc: +#if NETCF_1_0 || NETCF_2_0 + result.DateTime = di.LastWriteTime.ToUniversalTime(); +#else + result.DateTime = di.LastWriteTimeUtc; +#endif + break; + + case TimeSetting.Fixed: + result.DateTime = fixedDateTime_; + break; + + default: + throw new ZipException("Unhandled time setting in MakeDirectoryEntry"); + } + + externalAttributes = ((int)di.Attributes & getAttributes_); + } + else + { + if (timeSetting_ == TimeSetting.Fixed) + { + result.DateTime = fixedDateTime_; + } + } + + // Always set directory attribute on. 
+ externalAttributes |= (setAttributes_ | 16); + result.ExternalFileAttributes = externalAttributes; + + return result; + } + + #endregion + + #region Instance Fields + INameTransform nameTransform_; + DateTime fixedDateTime_ = DateTime.Now; + TimeSetting timeSetting_; + bool isUnicodeText_; + + int getAttributes_ = -1; + int setAttributes_; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipException.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipException.cs new file mode 100644 index 000000000..7ceb4a063 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipException.cs @@ -0,0 +1,94 @@ +// ZipException.cs +// +// Copyright (C) 2001 Mike Krueger +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; + +#if !NETCF_1_0 && !NETCF_2_0 +using System.Runtime.Serialization; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + + /// + /// Represents exception conditions specific to Zip archive handling + /// +#if !NETCF_1_0 && !NETCF_2_0 + [Serializable] +#endif + public class ZipException : SharpZipBaseException + { +#if !NETCF_1_0 && !NETCF_2_0 + /// + /// Deserialization constructor + /// + /// for this constructor + /// for this constructor + protected ZipException(SerializationInfo info, StreamingContext context ) + : base( info, context ) + { + } +#endif + + /// + /// Initializes a new instance of the ZipException class. + /// + public ZipException() + { + } + + /// + /// Initializes a new instance of the ZipException class with a specified error message. + /// + /// The error message that explains the reason for the exception. + public ZipException(string message) + : base(message) + { + } + + /// + /// Initialise a new instance of ZipException. + /// + /// A message describing the error. + /// The exception that is the cause of the current exception. 
+ public ZipException(string message, Exception exception) + : base(message, exception) + { + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipExtraData.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipExtraData.cs new file mode 100644 index 000000000..533ba9c33 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipExtraData.cs @@ -0,0 +1,987 @@ +// +// ZipExtraData.cs +// +// Copyright 2004-2007 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. 
If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + // TODO: Sort out wether tagged data is useful and what a good implementation might look like. + // Its just a sketch of an idea at the moment. + + /// + /// ExtraData tagged value interface. + /// + public interface ITaggedData + { + /// + /// Get the ID for this tagged data value. + /// + short TagID { get; } + + /// + /// Set the contents of this instance from the data passed. + /// + /// The data to extract contents from. + /// The offset to begin extracting data from. + /// The number of bytes to extract. + void SetData(byte[] data, int offset, int count); + + /// + /// Get the data representing this instance. + /// + /// Returns the data for this instance. + byte[] GetData(); + } + + /// + /// A raw binary tagged value + /// + public class RawTaggedData : ITaggedData + { + /// + /// Initialise a new instance. + /// + /// The tag ID. + public RawTaggedData(short tag) + { + _tag = tag; + } + + #region ITaggedData Members + + /// + /// Get the ID for this tagged data value. + /// + public short TagID + { + get { return _tag; } + set { _tag = value; } + } + + /// + /// Set the data from the raw values provided. + /// + /// The raw data to extract values from. + /// The index to start extracting values from. + /// The number of bytes available. + public void SetData(byte[] data, int offset, int count) + { + if( data==null ) + { + throw new ArgumentNullException("data"); + } + + _data=new byte[count]; + Array.Copy(data, offset, _data, 0, count); + } + + /// + /// Get the binary data representing this instance. + /// + /// The raw binary data representing this instance. 
+ public byte[] GetData() + { + return _data; + } + + #endregion + + /// + /// Get /set the binary data representing this instance. + /// + /// The raw binary data representing this instance. + public byte[] Data + { + get { return _data; } + set { _data=value; } + } + + #region Instance Fields + /// + /// The tag ID for this instance. + /// + short _tag; + + byte[] _data; + #endregion + } + + /// + /// Class representing extended unix date time values. + /// + public class ExtendedUnixData : ITaggedData + { + /// + /// Flags indicate which values are included in this instance. + /// + [Flags] + public enum Flags : byte + { + /// + /// The modification time is included + /// + ModificationTime = 0x01, + + /// + /// The access time is included + /// + AccessTime = 0x02, + + /// + /// The create time is included. + /// + CreateTime = 0x04, + } + + #region ITaggedData Members + + /// + /// Get the ID + /// + public short TagID + { + get { return 0x5455; } + } + + /// + /// Set the data from the raw values provided. + /// + /// The raw data to extract values from. + /// The index to start extracting values from. + /// The number of bytes available. 
+ public void SetData(byte[] data, int index, int count) + { + using (MemoryStream ms = new MemoryStream(data, index, count, false)) + using (ZipHelperStream helperStream = new ZipHelperStream(ms)) + { + // bit 0 if set, modification time is present + // bit 1 if set, access time is present + // bit 2 if set, creation time is present + + _flags = (Flags)helperStream.ReadByte(); + if (((_flags & Flags.ModificationTime) != 0) && (count >= 5)) + { + int iTime = helperStream.ReadLEInt(); + + _modificationTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() + + new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime(); + } + + if ((_flags & Flags.AccessTime) != 0) + { + int iTime = helperStream.ReadLEInt(); + + _lastAccessTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() + + new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime(); + } + + if ((_flags & Flags.CreateTime) != 0) + { + int iTime = helperStream.ReadLEInt(); + + _createTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() + + new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime(); + } + } + } + + /// + /// Get the binary data representing this instance. + /// + /// The raw binary data representing this instance. 
+ public byte[] GetData() + { + using (MemoryStream ms = new MemoryStream()) + using (ZipHelperStream helperStream = new ZipHelperStream(ms)) + { + helperStream.IsStreamOwner = false; + helperStream.WriteByte((byte)_flags); // Flags + if ( (_flags & Flags.ModificationTime) != 0) { + TimeSpan span = _modificationTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime(); + int seconds = (int)span.TotalSeconds; + helperStream.WriteLEInt(seconds); + } + if ( (_flags & Flags.AccessTime) != 0) { + TimeSpan span = _lastAccessTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime(); + int seconds = (int)span.TotalSeconds; + helperStream.WriteLEInt(seconds); + } + if ( (_flags & Flags.CreateTime) != 0) { + TimeSpan span = _createTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime(); + int seconds = (int)span.TotalSeconds; + helperStream.WriteLEInt(seconds); + } + return ms.ToArray(); + } + } + + #endregion + + /// + /// Test a value to see if is valid and can be represented here. + /// + /// The value to test. + /// Returns true if the value is valid and can be represented; false if not. + /// The standard Unix time is a signed integer data type, directly encoding the Unix time number, + /// which is the number of seconds since 1970-01-01. + /// Being 32 bits means the values here cover a range of about 136 years. + /// The minimum representable time is 1901-12-13 20:45:52, + /// and the maximum representable time is 2038-01-19 03:14:07. 
+ /// + public static bool IsValidValue(DateTime value) + { + return (( value >= new DateTime(1901, 12, 13, 20, 45, 52)) || + ( value <= new DateTime(2038, 1, 19, 03, 14, 07) )); + } + + /// + /// Get /set the Modification Time + /// + /// + /// + public DateTime ModificationTime + { + get { return _modificationTime; } + set + { + if ( !IsValidValue(value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + _flags |= Flags.ModificationTime; + _modificationTime=value; + } + } + + /// + /// Get / set the Access Time + /// + /// + /// + public DateTime AccessTime + { + get { return _lastAccessTime; } + set { + if ( !IsValidValue(value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + _flags |= Flags.AccessTime; + _lastAccessTime=value; + } + } + + /// + /// Get / Set the Create Time + /// + /// + /// + public DateTime CreateTime + { + get { return _createTime; } + set { + if ( !IsValidValue(value) ) { + throw new ArgumentOutOfRangeException("value"); + } + + _flags |= Flags.CreateTime; + _createTime=value; + } + } + + /// + /// Get/set the values to include. + /// + Flags Include + { + get { return _flags; } + set { _flags = value; } + } + + #region Instance Fields + Flags _flags; + DateTime _modificationTime = new DateTime(1970,1,1); + DateTime _lastAccessTime = new DateTime(1970, 1, 1); + DateTime _createTime = new DateTime(1970, 1, 1); + #endregion + } + + /// + /// Class handling NT date time values. + /// + public class NTTaggedData : ITaggedData + { + /// + /// Get the ID for this tagged data value. + /// + public short TagID + { + get { return 10; } + } + + /// + /// Set the data from the raw values provided. + /// + /// The raw data to extract values from. + /// The index to start extracting values from. + /// The number of bytes available. 
+ public void SetData(byte[] data, int index, int count) + { + using (MemoryStream ms = new MemoryStream(data, index, count, false)) + using (ZipHelperStream helperStream = new ZipHelperStream(ms)) + { + helperStream.ReadLEInt(); // Reserved + while (helperStream.Position < helperStream.Length) + { + int ntfsTag = helperStream.ReadLEShort(); + int ntfsLength = helperStream.ReadLEShort(); + if (ntfsTag == 1) + { + if (ntfsLength >= 24) + { + long lastModificationTicks = helperStream.ReadLELong(); + _lastModificationTime = DateTime.FromFileTime(lastModificationTicks); + + long lastAccessTicks = helperStream.ReadLELong(); + _lastAccessTime = DateTime.FromFileTime(lastAccessTicks); + + long createTimeTicks = helperStream.ReadLELong(); + _createTime = DateTime.FromFileTime(createTimeTicks); + } + break; + } + else + { + // An unknown NTFS tag so simply skip it. + helperStream.Seek(ntfsLength, SeekOrigin.Current); + } + } + } + } + + /// + /// Get the binary data representing this instance. + /// + /// The raw binary data representing this instance. + public byte[] GetData() + { + using (MemoryStream ms = new MemoryStream()) + using (ZipHelperStream helperStream = new ZipHelperStream(ms)) + { + helperStream.IsStreamOwner = false; + helperStream.WriteLEInt(0); // Reserved + helperStream.WriteLEShort(1); // Tag + helperStream.WriteLEShort(24); // Length = 3 x 8. + helperStream.WriteLELong(_lastModificationTime.ToFileTime()); + helperStream.WriteLELong(_lastAccessTime.ToFileTime()); + helperStream.WriteLELong(_createTime.ToFileTime()); + return ms.ToArray(); + } + } + + /// + /// Test a valuie to see if is valid and can be represented here. + /// + /// The value to test. + /// Returns true if the value is valid and can be represented; false if not. + /// + /// NTFS filetimes are 64-bit unsigned integers, stored in Intel + /// (least significant byte first) byte order. They determine the + /// number of 1.0E-07 seconds (1/10th microseconds!) 
past WinNT "epoch", + /// which is "01-Jan-1601 00:00:00 UTC". 28 May 60056 is the upper limit + /// + public static bool IsValidValue(DateTime value) + { + bool result = true; + try + { + value.ToFileTimeUtc(); + } + catch + { + result = false; + } + return result; + } + + /// + /// Get/set the last modification time. + /// + public DateTime LastModificationTime + { + get { return _lastModificationTime; } + set { + if (! IsValidValue(value)) + { + throw new ArgumentOutOfRangeException("value"); + } + _lastModificationTime = value; + } + } + + /// + /// Get /set the create time + /// + public DateTime CreateTime + { + get { return _createTime; } + set { + if ( !IsValidValue(value)) { + throw new ArgumentOutOfRangeException("value"); + } + _createTime = value; + } + } + + /// + /// Get /set the last access time. + /// + public DateTime LastAccessTime + { + get { return _lastAccessTime; } + set { + if (!IsValidValue(value)) { + throw new ArgumentOutOfRangeException("value"); + } + _lastAccessTime = value; + } + } + + #region Instance Fields + DateTime _lastAccessTime = DateTime.FromFileTime(0); + DateTime _lastModificationTime = DateTime.FromFileTime(0); + DateTime _createTime = DateTime.FromFileTime(0); + #endregion + } + + /// + /// A factory that creates tagged data instances. + /// + interface ITaggedDataFactory + { + /// + /// Get data for a specific tag value. + /// + /// The tag ID to find. + /// The data to search. + /// The offset to begin extracting data from. + /// The number of bytes to extract. + /// The located value found, or null if not found. + ITaggedData Create(short tag, byte[] data, int offset, int count); + } + + /// + /// + /// A class to handle the extra data field for Zip entries + /// + /// + /// Extra data contains 0 or more values each prefixed by a header tag and length. + /// They contain zero or more bytes of actual data. + /// The data is held internally using a copy on write strategy. 
This is more efficient but + /// means that for extra data created by passing in data can have the values modified by the caller + /// in some circumstances. + /// + sealed public class ZipExtraData : IDisposable + { + #region Constructors + /// + /// Initialise a default instance. + /// + public ZipExtraData() + { + Clear(); + } + + /// + /// Initialise with known extra data. + /// + /// The extra data. + public ZipExtraData(byte[] data) + { + if ( data == null ) + { + _data = new byte[0]; + } + else + { + _data = data; + } + } + #endregion + + /// + /// Get the raw extra data value + /// + /// Returns the raw byte[] extra data this instance represents. + public byte[] GetEntryData() + { + if ( Length > ushort.MaxValue ) { + throw new ZipException("Data exceeds maximum length"); + } + + return (byte[])_data.Clone(); + } + + /// + /// Clear the stored data. + /// + public void Clear() + { + if ( (_data == null) || (_data.Length != 0) ) { + _data = new byte[0]; + } + } + + /// + /// Gets the current extra data length. + /// + public int Length + { + get { return _data.Length; } + } + + /// + /// Get a read-only for the associated tag. + /// + /// The tag to locate data for. + /// Returns a containing tag data or null if no tag was found. + public Stream GetStreamForTag(int tag) + { + Stream result = null; + if ( Find(tag) ) { + result = new MemoryStream(_data, _index, _readValueLength, false); + } + return result; + } + + /// + /// Get the tagged data for a tag. + /// + /// The tag to search for. + /// Returns a tagged value or null if none found. 
+ private ITaggedData GetData(short tag) + { + ITaggedData result = null; + if (Find(tag)) + { + result = Create(tag, _data, _readValueStart, _readValueLength); + } + return result; + } + + static ITaggedData Create(short tag, byte[] data, int offset, int count) + { + ITaggedData result = null; + switch ( tag ) + { + case 0x000A: + result = new NTTaggedData(); + break; + case 0x5455: + result = new ExtendedUnixData(); + break; + default: + result = new RawTaggedData(tag); + break; + } + result.SetData(data, offset, count); + return result; + } + + /// + /// Get the length of the last value found by + /// + /// This is only valid if has previously returned true. + public int ValueLength + { + get { return _readValueLength; } + } + + /// + /// Get the index for the current read value. + /// + /// This is only valid if has previously returned true. + /// Initially the result will be the index of the first byte of actual data. The value is updated after calls to + /// , and . + public int CurrentReadIndex + { + get { return _index; } + } + + /// + /// Get the number of bytes remaining to be read for the current value; + /// + public int UnreadCount + { + get + { + if ((_readValueStart > _data.Length) || + (_readValueStart < 4) ) { + throw new ZipException("Find must be called before calling a Read method"); + } + + return _readValueStart + _readValueLength - _index; + } + } + + /// + /// Find an extra data value + /// + /// The identifier for the value to find. + /// Returns true if the value was found; false otherwise. + public bool Find(int headerID) + { + _readValueStart = _data.Length; + _readValueLength = 0; + _index = 0; + + int localLength = _readValueStart; + int localTag = headerID - 1; + + // Trailing bytes that cant make up an entry (as there arent enough + // bytes for a tag and length) are ignored! 
+ while ( (localTag != headerID) && (_index < _data.Length - 3) ) { + localTag = ReadShortInternal(); + localLength = ReadShortInternal(); + if ( localTag != headerID ) { + _index += localLength; + } + } + + bool result = (localTag == headerID) && ((_index + localLength) <= _data.Length); + + if ( result ) { + _readValueStart = _index; + _readValueLength = localLength; + } + + return result; + } + + /// + /// Add a new entry to extra data. + /// + /// The value to add. + public void AddEntry(ITaggedData taggedData) + { + if (taggedData == null) + { + throw new ArgumentNullException("taggedData"); + } + AddEntry(taggedData.TagID, taggedData.GetData()); + } + + /// + /// Add a new entry to extra data + /// + /// The ID for this entry. + /// The data to add. + /// If the ID already exists its contents are replaced. + public void AddEntry(int headerID, byte[] fieldData) + { + if ( (headerID > ushort.MaxValue) || (headerID < 0)) { + throw new ArgumentOutOfRangeException("headerID"); + } + + int addLength = (fieldData == null) ? 0 : fieldData.Length; + + if ( addLength > ushort.MaxValue ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("fieldData"); +#else + throw new ArgumentOutOfRangeException("fieldData", "exceeds maximum length"); +#endif + } + + // Test for new length before adjusting data. + int newLength = _data.Length + addLength + 4; + + if ( Find(headerID) ) + { + newLength -= (ValueLength + 4); + } + + if ( newLength > ushort.MaxValue ) { + throw new ZipException("Data exceeds maximum length"); + } + + Delete(headerID); + + byte[] newData = new byte[newLength]; + _data.CopyTo(newData, 0); + int index = _data.Length; + _data = newData; + SetShort(ref index, headerID); + SetShort(ref index, addLength); + if ( fieldData != null ) { + fieldData.CopyTo(newData, index); + } + } + + /// + /// Start adding a new entry. + /// + /// Add data using , , , or . 
+ /// The new entry is completed and actually added by calling + /// + public void StartNewEntry() + { + _newEntry = new MemoryStream(); + } + + /// + /// Add entry data added since using the ID passed. + /// + /// The identifier to use for this entry. + public void AddNewEntry(int headerID) + { + byte[] newData = _newEntry.ToArray(); + _newEntry = null; + AddEntry(headerID, newData); + } + + /// + /// Add a byte of data to the pending new entry. + /// + /// The byte to add. + /// + public void AddData(byte data) + { + _newEntry.WriteByte(data); + } + + /// + /// Add data to a pending new entry. + /// + /// The data to add. + /// + public void AddData(byte[] data) + { + if ( data == null ) { + throw new ArgumentNullException("data"); + } + + _newEntry.Write(data, 0, data.Length); + } + + /// + /// Add a short value in little endian order to the pending new entry. + /// + /// The data to add. + /// + public void AddLeShort(int toAdd) + { + unchecked { + _newEntry.WriteByte(( byte )toAdd); + _newEntry.WriteByte(( byte )(toAdd >> 8)); + } + } + + /// + /// Add an integer value in little endian order to the pending new entry. + /// + /// The data to add. + /// + public void AddLeInt(int toAdd) + { + unchecked { + AddLeShort(( short )toAdd); + AddLeShort(( short )(toAdd >> 16)); + } + } + + /// + /// Add a long value in little endian order to the pending new entry. + /// + /// The data to add. + /// + public void AddLeLong(long toAdd) + { + unchecked { + AddLeInt(( int )(toAdd & 0xffffffff)); + AddLeInt(( int )(toAdd >> 32)); + } + } + + /// + /// Delete an extra data field. + /// + /// The identifier of the field to delete. + /// Returns true if the field was found and deleted. 
+ public bool Delete(int headerID) + { + bool result = false; + + if ( Find(headerID) ) { + result = true; + int trueStart = _readValueStart - 4; + + byte[] newData = new byte[_data.Length - (ValueLength + 4)]; + Array.Copy(_data, 0, newData, 0, trueStart); + + int trueEnd = trueStart + ValueLength + 4; + Array.Copy(_data, trueEnd, newData, trueStart, _data.Length - trueEnd); + _data = newData; + } + return result; + } + + #region Reading Support + /// + /// Read a long in little endian form from the last found data value + /// + /// Returns the long value read. + public long ReadLong() + { + ReadCheck(8); + return (ReadInt() & 0xffffffff) | ((( long )ReadInt()) << 32); + } + + /// + /// Read an integer in little endian form from the last found data value. + /// + /// Returns the integer read. + public int ReadInt() + { + ReadCheck(4); + + int result = _data[_index] + (_data[_index + 1] << 8) + + (_data[_index + 2] << 16) + (_data[_index + 3] << 24); + _index += 4; + return result; + } + + /// + /// Read a short value in little endian form from the last found data value. + /// + /// Returns the short value read. + public int ReadShort() + { + ReadCheck(2); + int result = _data[_index] + (_data[_index + 1] << 8); + _index += 2; + return result; + } + + /// + /// Read a byte from an extra data + /// + /// The byte value read or -1 if the end of data has been reached. + public int ReadByte() + { + int result = -1; + if ( (_index < _data.Length) && (_readValueStart + _readValueLength > _index) ) { + result = _data[_index]; + _index += 1; + } + return result; + } + + /// + /// Skip data during reading. + /// + /// The number of bytes to skip. 
+ public void Skip(int amount) + { + ReadCheck(amount); + _index += amount; + } + + void ReadCheck(int length) + { + if ((_readValueStart > _data.Length) || + (_readValueStart < 4) ) { + throw new ZipException("Find must be called before calling a Read method"); + } + + if (_index > _readValueStart + _readValueLength - length ) { + throw new ZipException("End of extra data"); + } + + if ( _index + length < 4 ) { + throw new ZipException("Cannot read before start of tag"); + } + } + + /// + /// Internal form of that reads data at any location. + /// + /// Returns the short value read. + int ReadShortInternal() + { + if ( _index > _data.Length - 2) { + throw new ZipException("End of extra data"); + } + + int result = _data[_index] + (_data[_index + 1] << 8); + _index += 2; + return result; + } + + void SetShort(ref int index, int source) + { + _data[index] = (byte)source; + _data[index + 1] = (byte)(source >> 8); + index += 2; + } + + #endregion + + #region IDisposable Members + + /// + /// Dispose of this instance. + /// + public void Dispose() + { + if ( _newEntry != null ) { + _newEntry.Close(); + } + } + + #endregion + + #region Instance Fields + int _index; + int _readValueStart; + int _readValueLength; + + MemoryStream _newEntry; + byte[] _data; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipFile.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipFile.cs new file mode 100644 index 000000000..b896c1b3a --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipFile.cs @@ -0,0 +1,4486 @@ +// ZipFile.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 2009-12-22 Z-1649 Added AES support +// 2010-03-02 Z-1650 Fixed updating ODT archives in memory. Exposed exceptions in updating. 
+// 2010-05-25 Z-1663 Fixed exception when testing local header compressed size of -1 + +using System; +using System.Collections; +using System.IO; +using System.Text; +using System.Globalization; + +#if !NETCF_1_0 +using System.Security.Cryptography; +using GitHub.ICSharpCode.SharpZipLib.Encryption; +#endif + +using GitHub.ICSharpCode.SharpZipLib.Core; +using GitHub.ICSharpCode.SharpZipLib.Checksums; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + #region Keys Required Event Args + /// + /// Arguments used with KeysRequiredEvent + /// + public class KeysRequiredEventArgs : EventArgs + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The name of the file for which keys are required. + public KeysRequiredEventArgs(string name) + { + fileName = name; + } + + /// + /// Initialise a new instance of + /// + /// The name of the file for which keys are required. + /// The current key value. + public KeysRequiredEventArgs(string name, byte[] keyValue) + { + fileName = name; + key = keyValue; + } + + #endregion + #region Properties + /// + /// Gets the name of the file for which keys are required. + /// + public string FileName + { + get { return fileName; } + } + + /// + /// Gets or sets the key value + /// + public byte[] Key + { + get { return key; } + set { key = value; } + } + #endregion + + #region Instance Fields + string fileName; + byte[] key; + #endregion + } + #endregion + + #region Test Definitions + /// + /// The strategy to apply to testing. + /// + public enum TestStrategy + { + /// + /// Find the first error only. + /// + FindFirstError, + /// + /// Find all possible errors. + /// + FindAllErrors, + } + + /// + /// The operation in progress reported by a during testing. + /// + /// TestArchive + public enum TestOperation + { + /// + /// Setting up testing. 
+ /// + Initialising, + + /// + /// Testing an individual entries header + /// + EntryHeader, + + /// + /// Testing an individual entries data + /// + EntryData, + + /// + /// Testing an individual entry has completed. + /// + EntryComplete, + + /// + /// Running miscellaneous tests + /// + MiscellaneousTests, + + /// + /// Testing is complete + /// + Complete, + } + + /// + /// Status returned returned by during testing. + /// + /// TestArchive + public class TestStatus + { + #region Constructors + /// + /// Initialise a new instance of + /// + /// The this status applies to. + public TestStatus(ZipFile file) + { + file_ = file; + } + #endregion + + #region Properties + + /// + /// Get the current in progress. + /// + public TestOperation Operation + { + get { return operation_; } + } + + /// + /// Get the this status is applicable to. + /// + public ZipFile File + { + get { return file_; } + } + + /// + /// Get the current/last entry tested. + /// + public ZipEntry Entry + { + get { return entry_; } + } + + /// + /// Get the number of errors detected so far. + /// + public int ErrorCount + { + get { return errorCount_; } + } + + /// + /// Get the number of bytes tested so far for the current entry. + /// + public long BytesTested + { + get { return bytesTested_; } + } + + /// + /// Get a value indicating wether the last entry test was valid. 
+ /// + public bool EntryValid + { + get { return entryValid_; } + } + #endregion + + #region Internal API + internal void AddError() + { + errorCount_++; + entryValid_ = false; + } + + internal void SetOperation(TestOperation operation) + { + operation_ = operation; + } + + internal void SetEntry(ZipEntry entry) + { + entry_ = entry; + entryValid_ = true; + bytesTested_ = 0; + } + + internal void SetBytesTested(long value) + { + bytesTested_ = value; + } + #endregion + + #region Instance Fields + ZipFile file_; + ZipEntry entry_; + bool entryValid_; + int errorCount_; + long bytesTested_; + TestOperation operation_; + #endregion + } + + /// + /// Delegate invoked during testing if supplied indicating current progress and status. + /// + /// If the message is non-null an error has occured. If the message is null + /// the operation as found in status has started. + public delegate void ZipTestResultHandler(TestStatus status, string message); + #endregion + + #region Update Definitions + /// + /// The possible ways of applying updates to an archive. + /// + public enum FileUpdateMode + { + /// + /// Perform all updates on temporary files ensuring that the original file is saved. + /// + Safe, + /// + /// Update the archive directly, which is faster but less safe. + /// + Direct, + } + #endregion + + #region ZipFile Class + /// + /// This class represents a Zip archive. You can ask for the contained + /// entries, or get an input stream for a file entry. The entry is + /// automatically decompressed. + /// + /// You can also update the archive adding or deleting entries. + /// + /// This class is thread safe for input: You can open input streams for arbitrary + /// entries in different threads. + ///
+ ///
Author of the original java version : Jochen Hoenicke + ///
+ /// + /// + /// using System; + /// using System.Text; + /// using System.Collections; + /// using System.IO; + /// + /// using GitHub.ICSharpCode.SharpZipLib.Zip; + /// + /// class MainClass + /// { + /// static public void Main(string[] args) + /// { + /// using (ZipFile zFile = new ZipFile(args[0])) { + /// Console.WriteLine("Listing of : " + zFile.Name); + /// Console.WriteLine(""); + /// Console.WriteLine("Raw Size Size Date Time Name"); + /// Console.WriteLine("-------- -------- -------- ------ ---------"); + /// foreach (ZipEntry e in zFile) { + /// if ( e.IsFile ) { + /// DateTime d = e.DateTime; + /// Console.WriteLine("{0, -10}{1, -10}{2} {3} {4}", e.Size, e.CompressedSize, + /// d.ToString("dd-MM-yy"), d.ToString("HH:mm"), + /// e.Name); + /// } + /// } + /// } + /// } + /// } + /// + /// + public class ZipFile : IEnumerable, IDisposable + { + #region KeyHandling + + /// + /// Delegate for handling keys/password setting during compresion/decompression. + /// + public delegate void KeysRequiredEventHandler( + object sender, + KeysRequiredEventArgs e + ); + + /// + /// Event handler for handling encryption keys. + /// + public KeysRequiredEventHandler KeysRequired; + + /// + /// Handles getting of encryption keys when required. + /// + /// The file for which encryption keys are required. + void OnKeysRequired(string fileName) + { + if (KeysRequired != null) { + KeysRequiredEventArgs krea = new KeysRequiredEventArgs(fileName, key); + KeysRequired(this, krea); + key = krea.Key; + } + } + + /// + /// Get/set the encryption key value. + /// + byte[] Key + { + get { return key; } + set { key = value; } + } + +#if !NETCF_1_0 + /// + /// Password to be used for encrypting/decrypting files. + /// + /// Set to null if no password is required. 
+ public string Password + { + set + { + if ( (value == null) || (value.Length == 0) ) { + key = null; + } + else { + rawPassword_ = value; + key = PkzipClassic.GenerateKeys(ZipConstants.ConvertToArray(value)); + } + } + } +#endif + + /// + /// Get a value indicating wether encryption keys are currently available. + /// + bool HaveKeys + { + get { return key != null; } + } + #endregion + + #region Constructors + /// + /// Opens a Zip file with the given name for reading. + /// + /// The name of the file to open. + /// The argument supplied is null. + /// + /// An i/o error occurs + /// + /// + /// The file doesn't contain a valid zip archive. + /// + public ZipFile(string name) + { + if ( name == null ) { + throw new ArgumentNullException("name"); + } + + name_ = name; + + baseStream_ = File.Open(name, FileMode.Open, FileAccess.Read, FileShare.Read); + isStreamOwner = true; + + try { + ReadEntries(); + } + catch { + DisposeInternal(true); + throw; + } + } + + /// + /// Opens a Zip file reading the given . + /// + /// The to read archive data from. + /// The supplied argument is null. + /// + /// An i/o error occurs. + /// + /// + /// The file doesn't contain a valid zip archive. + /// + public ZipFile(FileStream file) + { + if ( file == null ) { + throw new ArgumentNullException("file"); + } + + if ( !file.CanSeek ) { + throw new ArgumentException("Stream is not seekable", "file"); + } + + baseStream_ = file; + name_ = file.Name; + isStreamOwner = true; + + try { + ReadEntries(); + } + catch { + DisposeInternal(true); + throw; + } + } + + /// + /// Opens a Zip file reading the given . + /// + /// The to read archive data from. + /// + /// An i/o error occurs + /// + /// + /// The stream doesn't contain a valid zip archive.
+ ///
+ /// + /// The stream doesnt support seeking. + /// + /// + /// The stream argument is null. + /// + public ZipFile(Stream stream) + { + if ( stream == null ) { + throw new ArgumentNullException("stream"); + } + + if ( !stream.CanSeek ) { + throw new ArgumentException("Stream is not seekable", "stream"); + } + + baseStream_ = stream; + isStreamOwner = true; + + if ( baseStream_.Length > 0 ) { + try { + ReadEntries(); + } + catch { + DisposeInternal(true); + throw; + } + } else { + entries_ = new ZipEntry[0]; + isNewArchive_ = true; + } + } + + /// + /// Initialises a default instance with no entries and no file storage. + /// + internal ZipFile() + { + entries_ = new ZipEntry[0]; + isNewArchive_ = true; + } + + #endregion + + #region Destructors and Closing + /// + /// Finalize this instance. + /// + ~ZipFile() + { + Dispose(false); + } + + /// + /// Closes the ZipFile. If the stream is owned then this also closes the underlying input stream. + /// Once closed, no further instance methods should be called. + /// + /// + /// An i/o error occurs. + /// + public void Close() + { + DisposeInternal(true); + GC.SuppressFinalize(this); + } + + #endregion + + #region Creators + /// + /// Create a new whose data will be stored in a file. + /// + /// The name of the archive to create. + /// Returns the newly created + /// is null + public static ZipFile Create(string fileName) + { + if ( fileName == null ) { + throw new ArgumentNullException("fileName"); + } + + FileStream fs = File.Create(fileName); + + ZipFile result = new ZipFile(); + result.name_ = fileName; + result.baseStream_ = fs; + result.isStreamOwner = true; + return result; + } + + /// + /// Create a new whose data will be stored on a stream. + /// + /// The stream providing data storage. + /// Returns the newly created + /// is null + /// doesnt support writing. 
+ public static ZipFile Create(Stream outStream) + { + if ( outStream == null ) { + throw new ArgumentNullException("outStream"); + } + + if ( !outStream.CanWrite ) { + throw new ArgumentException("Stream is not writeable", "outStream"); + } + + if ( !outStream.CanSeek ) { + throw new ArgumentException("Stream is not seekable", "outStream"); + } + + ZipFile result = new ZipFile(); + result.baseStream_ = outStream; + return result; + } + + #endregion + + #region Properties + /// + /// Get/set a flag indicating if the underlying stream is owned by the ZipFile instance. + /// If the flag is true then the stream will be closed when Close is called. + /// + /// + /// The default value is true in all cases. + /// + public bool IsStreamOwner + { + get { return isStreamOwner; } + set { isStreamOwner = value; } + } + + /// + /// Get a value indicating wether + /// this archive is embedded in another file or not. + /// + public bool IsEmbeddedArchive + { + // Not strictly correct in all circumstances currently + get { return offsetOfFirstEntry > 0; } + } + + /// + /// Get a value indicating that this archive is a new one. + /// + public bool IsNewArchive + { + get { return isNewArchive_; } + } + + /// + /// Gets the comment for the zip file. + /// + public string ZipFileComment + { + get { return comment_; } + } + + /// + /// Gets the name of this zip file. + /// + public string Name + { + get { return name_; } + } + + /// + /// Gets the number of entries in this zip file. + /// + /// + /// The Zip file has been closed. + /// + [Obsolete("Use the Count property instead")] + public int Size + { + get + { + return entries_.Length; + } + } + + /// + /// Get the number of entries contained in this . 
+ /// + public long Count + { + get + { + return entries_.Length; + } + } + + /// + /// Indexer property for ZipEntries + /// + [System.Runtime.CompilerServices.IndexerNameAttribute("EntryByIndex")] + public ZipEntry this[int index] + { + get { + return (ZipEntry) entries_[index].Clone(); + } + } + + #endregion + + #region Input Handling + /// + /// Gets an enumerator for the Zip entries in this Zip file. + /// + /// Returns an for this archive. + /// + /// The Zip file has been closed. + /// + public IEnumerator GetEnumerator() + { + if (isDisposed_) { + throw new ObjectDisposedException("ZipFile"); + } + + return new ZipEntryEnumerator(entries_); + } + + /// + /// Return the index of the entry with a matching name + /// + /// Entry name to find + /// If true the comparison is case insensitive + /// The index position of the matching entry or -1 if not found + /// + /// The Zip file has been closed. + /// + public int FindEntry(string name, bool ignoreCase) + { + if (isDisposed_) { + throw new ObjectDisposedException("ZipFile"); + } + + // TODO: This will be slow as the next ice age for huge archives! + for (int i = 0; i < entries_.Length; i++) { + if (string.Compare(name, entries_[i].Name, ignoreCase, CultureInfo.InvariantCulture) == 0) { + return i; + } + } + return -1; + } + + /// + /// Searches for a zip entry in this archive with the given name. + /// String comparisons are case insensitive + /// + /// + /// The name to find. May contain directory components separated by slashes ('/'). + /// + /// + /// A clone of the zip entry, or null if no entry with that name exists. + /// + /// + /// The Zip file has been closed. + /// + public ZipEntry GetEntry(string name) + { + if (isDisposed_) { + throw new ObjectDisposedException("ZipFile"); + } + + int index = FindEntry(name, true); + return (index >= 0) ? (ZipEntry) entries_[index].Clone() : null; + } + + /// + /// Gets an input stream for reading the given zip entry data in an uncompressed form. 
+ /// Normally the should be an entry returned by GetEntry(). + /// + /// The to obtain a data for + /// An input containing data for this + /// + /// The ZipFile has already been closed + /// + /// + /// The compression method for the entry is unknown + /// + /// + /// The entry is not found in the ZipFile + /// + public Stream GetInputStream(ZipEntry entry) + { + if ( entry == null ) { + throw new ArgumentNullException("entry"); + } + + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + long index = entry.ZipFileIndex; + if ( (index < 0) || (index >= entries_.Length) || (entries_[index].Name != entry.Name) ) { + index = FindEntry(entry.Name, true); + if (index < 0) { + throw new ZipException("Entry cannot be found"); + } + } + return GetInputStream(index); + } + + /// + /// Creates an input stream reading a zip entry + /// + /// The index of the entry to obtain an input stream for. + /// + /// An input containing data for this + /// + /// + /// The ZipFile has already been closed + /// + /// + /// The compression method for the entry is unknown + /// + /// + /// The entry is not found in the ZipFile + /// + public Stream GetInputStream(long entryIndex) + { + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + long start = LocateEntry(entries_[entryIndex]); + CompressionMethod method = entries_[entryIndex].CompressionMethod; + Stream result = new PartialInputStream(this, start, entries_[entryIndex].CompressedSize); + + if (entries_[entryIndex].IsCrypted == true) { +#if NETCF_1_0 + throw new ZipException("decryption not supported for Compact Framework 1.0"); +#else + result = CreateAndInitDecryptionStream(result, entries_[entryIndex]); + if (result == null) { + throw new ZipException("Unable to decrypt this entry"); + } +#endif + } + + switch (method) { + case CompressionMethod.Stored: + // read as is. 
+ break; + + case CompressionMethod.Deflated: + // No need to worry about ownership and closing as underlying stream close does nothing. + result = new InflaterInputStream(result, new Inflater(true)); + break; + + default: + throw new ZipException("Unsupported compression method " + method); + } + + return result; + } + + #endregion + + #region Archive Testing + /// + /// Test an archive for integrity/validity + /// + /// Perform low level data Crc check + /// true if all tests pass, false otherwise + /// Testing will terminate on the first error found. + public bool TestArchive(bool testData) + { + return TestArchive(testData, TestStrategy.FindFirstError, null); + } + + /// + /// Test an archive for integrity/validity + /// + /// Perform low level data Crc check + /// The to apply. + /// The handler to call during testing. + /// true if all tests pass, false otherwise + /// The object has already been closed. + public bool TestArchive(bool testData, TestStrategy strategy, ZipTestResultHandler resultHandler) + { + if (isDisposed_) { + throw new ObjectDisposedException("ZipFile"); + } + + TestStatus status = new TestStatus(this); + + if ( resultHandler != null ) { + resultHandler(status, null); + } + + HeaderTest test = testData ? 
(HeaderTest.Header | HeaderTest.Extract) : HeaderTest.Header; + + bool testing = true; + + try { + int entryIndex = 0; + + while ( testing && (entryIndex < Count) ) { + if ( resultHandler != null ) { + status.SetEntry(this[entryIndex]); + status.SetOperation(TestOperation.EntryHeader); + resultHandler(status, null); + } + + try { + TestLocalHeader(this[entryIndex], test); + } + catch(ZipException ex) { + status.AddError(); + + if ( resultHandler != null ) { + resultHandler(status, + string.Format("Exception during test - '{0}'", ex.Message)); + } + + if ( strategy == TestStrategy.FindFirstError ) { + testing = false; + } + } + + if ( testing && testData && this[entryIndex].IsFile ) { + if ( resultHandler != null ) { + status.SetOperation(TestOperation.EntryData); + resultHandler(status, null); + } + + Crc32 crc = new Crc32(); + + using (Stream entryStream = this.GetInputStream(this[entryIndex])) + { + + byte[] buffer = new byte[4096]; + long totalBytes = 0; + int bytesRead; + while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0) + { + crc.Update(buffer, 0, bytesRead); + + if (resultHandler != null) + { + totalBytes += bytesRead; + status.SetBytesTested(totalBytes); + resultHandler(status, null); + } + } + } + + if (this[entryIndex].Crc != crc.Value) { + status.AddError(); + + if ( resultHandler != null ) { + resultHandler(status, "CRC mismatch"); + } + + if ( strategy == TestStrategy.FindFirstError ) { + testing = false; + } + } + + if (( this[entryIndex].Flags & (int)GeneralBitFlags.Descriptor) != 0 ) { + ZipHelperStream helper = new ZipHelperStream(baseStream_); + DescriptorData data = new DescriptorData(); + helper.ReadDataDescriptor(this[entryIndex].LocalHeaderRequiresZip64, data); + if (this[entryIndex].Crc != data.Crc) { + status.AddError(); + } + + if (this[entryIndex].CompressedSize != data.CompressedSize) { + status.AddError(); + } + + if (this[entryIndex].Size != data.Size) { + status.AddError(); + } + } + } + + if ( resultHandler != null ) 
{ + status.SetOperation(TestOperation.EntryComplete); + resultHandler(status, null); + } + + entryIndex += 1; + } + + if ( resultHandler != null ) { + status.SetOperation(TestOperation.MiscellaneousTests); + resultHandler(status, null); + } + + // TODO: the 'Corrina Johns' test where local headers are missing from + // the central directory. They are therefore invisible to many archivers. + } + catch (Exception ex) { + status.AddError(); + + if ( resultHandler != null ) { + resultHandler(status, string.Format("Exception during test - '{0}'", ex.Message)); + } + } + + if ( resultHandler != null ) { + status.SetOperation(TestOperation.Complete); + status.SetEntry(null); + resultHandler(status, null); + } + + return (status.ErrorCount == 0); + } + + [Flags] + enum HeaderTest + { + Extract = 0x01, // Check that this header represents an entry whose data can be extracted + Header = 0x02, // Check that this header contents are valid + } + + /// + /// Test a local header against that provided from the central directory + /// + /// + /// The entry to test against + /// + /// The type of tests to carry out. 
+ /// The offset of the entries data in the file + long TestLocalHeader(ZipEntry entry, HeaderTest tests) + { + lock(baseStream_) + { + bool testHeader = (tests & HeaderTest.Header) != 0; + bool testData = (tests & HeaderTest.Extract) != 0; + + baseStream_.Seek(offsetOfFirstEntry + entry.Offset, SeekOrigin.Begin); + if ((int)ReadLEUint() != ZipConstants.LocalHeaderSignature) { + throw new ZipException(string.Format("Wrong local header signature @{0:X}", offsetOfFirstEntry + entry.Offset)); + } + + short extractVersion = ( short )ReadLEUshort(); + short localFlags = ( short )ReadLEUshort(); + short compressionMethod = ( short )ReadLEUshort(); + short fileTime = ( short )ReadLEUshort(); + short fileDate = ( short )ReadLEUshort(); + uint crcValue = ReadLEUint(); + long compressedSize = ReadLEUint(); + long size = ReadLEUint(); + int storedNameLength = ReadLEUshort(); + int extraDataLength = ReadLEUshort(); + + byte[] nameData = new byte[storedNameLength]; + StreamUtils.ReadFully(baseStream_, nameData); + + byte[] extraData = new byte[extraDataLength]; + StreamUtils.ReadFully(baseStream_, extraData); + + ZipExtraData localExtraData = new ZipExtraData(extraData); + + // Extra data / zip64 checks + if (localExtraData.Find(1)) + { + // 2010-03-04 Forum 10512: removed checks for version >= ZipConstants.VersionZip64 + // and size or compressedSize = MaxValue, due to rogue creators. + + size = localExtraData.ReadLong(); + compressedSize = localExtraData.ReadLong(); + + if ((localFlags & (int)GeneralBitFlags.Descriptor) != 0) + { + // These may be valid if patched later + if ( (size != -1) && (size != entry.Size)) { + throw new ZipException("Size invalid for descriptor"); + } + + if ((compressedSize != -1) && (compressedSize != entry.CompressedSize)) { + throw new ZipException("Compressed size invalid for descriptor"); + } + } + } + else + { + // No zip64 extra data but entry requires it. 
+ if ((extractVersion >= ZipConstants.VersionZip64) && + (((uint)size == uint.MaxValue) || ((uint)compressedSize == uint.MaxValue))) + { + throw new ZipException("Required Zip64 extended information missing"); + } + } + + if ( testData ) { + if ( entry.IsFile ) { + if ( !entry.IsCompressionMethodSupported() ) { + throw new ZipException("Compression method not supported"); + } + + if ( (extractVersion > ZipConstants.VersionMadeBy) + || ((extractVersion > 20) && (extractVersion < ZipConstants.VersionZip64)) ) { + throw new ZipException(string.Format("Version required to extract this entry not supported ({0})", extractVersion)); + } + + if ( (localFlags & ( int )(GeneralBitFlags.Patched | GeneralBitFlags.StrongEncryption | GeneralBitFlags.EnhancedCompress | GeneralBitFlags.HeaderMasked)) != 0 ) { + throw new ZipException("The library does not support the zip version required to extract this entry"); + } + } + } + + if (testHeader) + { + if ((extractVersion <= 63) && // Ignore later versions as we dont know about them.. + (extractVersion != 10) && + (extractVersion != 11) && + (extractVersion != 20) && + (extractVersion != 21) && + (extractVersion != 25) && + (extractVersion != 27) && + (extractVersion != 45) && + (extractVersion != 46) && + (extractVersion != 50) && + (extractVersion != 51) && + (extractVersion != 52) && + (extractVersion != 61) && + (extractVersion != 62) && + (extractVersion != 63) + ) + { + throw new ZipException(string.Format("Version required to extract this entry is invalid ({0})", extractVersion)); + } + + // Local entry flags dont have reserved bit set on. 
+ if ((localFlags & (int)(GeneralBitFlags.ReservedPKware4 | GeneralBitFlags.ReservedPkware14 | GeneralBitFlags.ReservedPkware15)) != 0) + { + throw new ZipException("Reserved bit flags cannot be set."); + } + + // Encryption requires extract version >= 20 + if (((localFlags & (int)GeneralBitFlags.Encrypted) != 0) && (extractVersion < 20)) + { + throw new ZipException(string.Format("Version required to extract this entry is too low for encryption ({0})", extractVersion)); + } + + // Strong encryption requires encryption flag to be set and extract version >= 50. + if ((localFlags & (int)GeneralBitFlags.StrongEncryption) != 0) + { + if ((localFlags & (int)GeneralBitFlags.Encrypted) == 0) + { + throw new ZipException("Strong encryption flag set but encryption flag is not set"); + } + + if (extractVersion < 50) + { + throw new ZipException(string.Format("Version required to extract this entry is too low for encryption ({0})", extractVersion)); + } + } + + // Patched entries require extract version >= 27 + if (((localFlags & (int)GeneralBitFlags.Patched) != 0) && (extractVersion < 27)) + { + throw new ZipException(string.Format("Patched data requires higher version than ({0})", extractVersion)); + } + + // Central header flags match local entry flags. 
+ if (localFlags != entry.Flags) + { + throw new ZipException("Central header/local header flags mismatch"); + } + + // Central header compression method matches local entry + if (entry.CompressionMethod != (CompressionMethod)compressionMethod) + { + throw new ZipException("Central header/local header compression method mismatch"); + } + + if (entry.Version != extractVersion) + { + throw new ZipException("Extract version mismatch"); + } + + // Strong encryption and extract version match + if ((localFlags & (int)GeneralBitFlags.StrongEncryption) != 0) + { + if (extractVersion < 62) + { + throw new ZipException("Strong encryption flag set but version not high enough"); + } + } + + if ((localFlags & (int)GeneralBitFlags.HeaderMasked) != 0) + { + if ((fileTime != 0) || (fileDate != 0)) + { + throw new ZipException("Header masked set but date/time values non-zero"); + } + } + + if ((localFlags & (int)GeneralBitFlags.Descriptor) == 0) + { + if (crcValue != (uint)entry.Crc) + { + throw new ZipException("Central header/local header crc mismatch"); + } + } + + // Crc valid for empty entry. + // This will also apply to streamed entries where size isnt known and the header cant be patched + if ((size == 0) && (compressedSize == 0)) + { + if (crcValue != 0) + { + throw new ZipException("Invalid CRC for empty entry"); + } + } + + // TODO: make test more correct... can't compare lengths as was done originally as this can fail for MBCS strings + // Assuming a code page at this point is not valid? Best is to store the name length in the ZipEntry probably + if (entry.Name.Length > storedNameLength) + { + throw new ZipException("File name length mismatch"); + } + + // Name data has already been read convert it and compare. 
+ string localName = ZipConstants.ConvertToStringExt(localFlags, nameData); + + // Central directory and local entry name match + if (localName != entry.Name) + { + throw new ZipException("Central header and local header file name mismatch"); + } + + // Directories have zero actual size but can have compressed size + if (entry.IsDirectory) + { + if (size > 0) + { + throw new ZipException("Directory cannot have size"); + } + + // There may be other cases where the compressed size can be greater than this? + // If so until details are known we will be strict. + if (entry.IsCrypted) + { + if (compressedSize > ZipConstants.CryptoHeaderSize + 2) + { + throw new ZipException("Directory compressed size invalid"); + } + } + else if (compressedSize > 2) + { + // When not compressed the directory size can validly be 2 bytes + // if the true size wasnt known when data was originally being written. + // NOTE: Versions of the library 0.85.4 and earlier always added 2 bytes + throw new ZipException("Directory compressed size invalid"); + } + } + + if (!ZipNameTransform.IsValidName(localName, true)) + { + throw new ZipException("Name is invalid"); + } + } + + // Tests that apply to both data and header. + + // Size can be verified only if it is known in the local header. + // it will always be known in the central header. 
+ if (((localFlags & (int)GeneralBitFlags.Descriptor) == 0) || + ((size > 0) || (compressedSize > 0))) { + + if (size != entry.Size) { + throw new ZipException( + string.Format("Size mismatch between central header({0}) and local header({1})", + entry.Size, size)); + } + + if (compressedSize != entry.CompressedSize && + compressedSize != 0xFFFFFFFF && compressedSize != -1) { + throw new ZipException( + string.Format("Compressed size mismatch between central header({0}) and local header({1})", + entry.CompressedSize, compressedSize)); + } + } + + int extraLength = storedNameLength + extraDataLength; + return offsetOfFirstEntry + entry.Offset + ZipConstants.LocalHeaderBaseSize + extraLength; + } + } + + #endregion + + #region Updating + + const int DefaultBufferSize = 4096; + + /// + /// The kind of update to apply. + /// + enum UpdateCommand + { + Copy, // Copy original file contents. + Modify, // Change encryption, compression, attributes, name, time etc, of an existing file. + Add, // Add a new file to the archive. + } + + #region Properties + /// + /// Get / set the to apply to names when updating. + /// + public INameTransform NameTransform + { + get { + return updateEntryFactory_.NameTransform; + } + + set { + updateEntryFactory_.NameTransform = value; + } + } + + /// + /// Get/set the used to generate values + /// during updates. + /// + public IEntryFactory EntryFactory + { + get { + return updateEntryFactory_; + } + + set { + if (value == null) { + updateEntryFactory_ = new ZipEntryFactory(); + } + else { + updateEntryFactory_ = value; + } + } + } + + /// + /// Get /set the buffer size to be used when updating this zip file. 
+ /// + public int BufferSize + { + get { return bufferSize_; } + set { + if ( value < 1024 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("value"); +#else + throw new ArgumentOutOfRangeException("value", "cannot be below 1024"); +#endif + } + + if ( bufferSize_ != value ) { + bufferSize_ = value; + copyBuffer_ = null; + } + } + } + + /// + /// Get a value indicating an update has been started. + /// + public bool IsUpdating + { + get { return updates_ != null; } + } + + /// + /// Get / set a value indicating how Zip64 Extension usage is determined when adding entries. + /// + public UseZip64 UseZip64 + { + get { return useZip64_; } + set { useZip64_ = value; } + } + + #endregion + + #region Immediate updating +// TBD: Direct form of updating +// +// public void Update(IEntryMatcher deleteMatcher) +// { +// } +// +// public void Update(IScanner addScanner) +// { +// } + #endregion + + #region Deferred Updating + /// + /// Begin updating this archive. + /// + /// The archive storage for use during the update. + /// The data source to utilise during updating. + /// ZipFile has been closed. + /// One of the arguments provided is null + /// ZipFile has been closed. + public void BeginUpdate(IArchiveStorage archiveStorage, IDynamicDataSource dataSource) + { + if ( archiveStorage == null ) { + throw new ArgumentNullException("archiveStorage"); + } + + if ( dataSource == null ) { + throw new ArgumentNullException("dataSource"); + } + + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + if ( IsEmbeddedArchive ) { + throw new ZipException ("Cannot update embedded/SFX archives"); + } + + archiveStorage_ = archiveStorage; + updateDataSource_ = dataSource; + + // NOTE: the baseStream_ may not currently support writing or seeking. 
+ + updateIndex_ = new Hashtable(); + + updates_ = new ArrayList(entries_.Length); + foreach(ZipEntry entry in entries_) { + int index = updates_.Add(new ZipUpdate(entry)); + updateIndex_.Add(entry.Name, index); + } + + // We must sort by offset before using offset's calculated sizes + updates_.Sort(new UpdateComparer()); + + int idx = 0; + foreach (ZipUpdate update in updates_) { + //If last entry, there is no next entry offset to use + if (idx == updates_.Count - 1) + break; + + update.OffsetBasedSize = ((ZipUpdate)updates_[idx + 1]).Entry.Offset - update.Entry.Offset; + idx++; + } + updateCount_ = updates_.Count; + + contentsEdited_ = false; + commentEdited_ = false; + newComment_ = null; + } + + /// + /// Begin updating to this archive. + /// + /// The storage to use during the update. + public void BeginUpdate(IArchiveStorage archiveStorage) + { + BeginUpdate(archiveStorage, new DynamicDiskDataSource()); + } + + /// + /// Begin updating this archive. + /// + /// + /// + /// + public void BeginUpdate() + { + if ( Name == null ) { + BeginUpdate(new MemoryArchiveStorage(), new DynamicDiskDataSource()); + } + else { + BeginUpdate(new DiskArchiveStorage(this), new DynamicDiskDataSource()); + } + } + + /// + /// Commit current updates, updating this archive. + /// + /// + /// + /// ZipFile has been closed. + public void CommitUpdate() + { + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + CheckUpdating(); + + try { + updateIndex_.Clear(); + updateIndex_=null; + + if( contentsEdited_ ) { + RunUpdates(); + } + else if( commentEdited_ ) { + UpdateCommentOnly(); + } + else { + // Create an empty archive if none existed originally. 
+ if( entries_.Length==0 ) { + byte[] theComment=(newComment_!=null)?newComment_.RawComment:ZipConstants.ConvertToArray(comment_); + using( ZipHelperStream zhs=new ZipHelperStream(baseStream_) ) { + zhs.WriteEndOfCentralDirectory(0, 0, 0, theComment); + } + } + } + + } + finally { + PostUpdateCleanup(); + } + } + + /// + /// Abort updating leaving the archive unchanged. + /// + /// + /// + public void AbortUpdate() + { + PostUpdateCleanup(); + } + + /// + /// Set the file comment to be recorded when the current update is commited. + /// + /// The comment to record. + /// ZipFile has been closed. + public void SetComment(string comment) + { + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + CheckUpdating(); + + newComment_ = new ZipString(comment); + + if ( newComment_.RawLength > 0xffff ) { + newComment_ = null; + throw new ZipException("Comment length exceeds maximum - 65535"); + } + + // We dont take account of the original and current comment appearing to be the same + // as encoding may be different. + commentEdited_ = true; + } + + #endregion + + #region Adding Entries + + void AddUpdate(ZipUpdate update) + { + contentsEdited_ = true; + + int index = FindExistingUpdate(update.Entry.Name); + + if (index >= 0) { + if ( updates_[index] == null ) { + updateCount_ += 1; + } + + // Direct replacement is faster than delete and add. + updates_[index] = update; + } + else { + index = updates_.Add(update); + updateCount_ += 1; + updateIndex_.Add(update.Entry.Name, index); + } + } + + /// + /// Add a new entry to the archive. + /// + /// The name of the file to add. + /// The compression method to use. + /// Ensure Unicode text is used for name and comment for this entry. + /// Argument supplied is null. + /// ZipFile has been closed. + /// Compression method is not supported. 
+ public void Add(string fileName, CompressionMethod compressionMethod, bool useUnicodeText ) + { + if (fileName == null) { + throw new ArgumentNullException("fileName"); + } + + if ( isDisposed_ ) { + throw new ObjectDisposedException("ZipFile"); + } + + if (!ZipEntry.IsCompressionMethodSupported(compressionMethod)) { + throw new ArgumentOutOfRangeException("compressionMethod"); + } + + CheckUpdating(); + contentsEdited_ = true; + + ZipEntry entry = EntryFactory.MakeFileEntry(fileName); + entry.IsUnicodeText = useUnicodeText; + entry.CompressionMethod = compressionMethod; + + AddUpdate(new ZipUpdate(fileName, entry)); + } + + /// + /// Add a new entry to the archive. + /// + /// The name of the file to add. + /// The compression method to use. + /// ZipFile has been closed. + /// The compression method is not supported. + public void Add(string fileName, CompressionMethod compressionMethod) + { + if ( fileName == null ) { + throw new ArgumentNullException("fileName"); + } + + if ( !ZipEntry.IsCompressionMethodSupported(compressionMethod) ) { + throw new ArgumentOutOfRangeException("compressionMethod"); + } + + CheckUpdating(); + contentsEdited_ = true; + + ZipEntry entry = EntryFactory.MakeFileEntry(fileName); + entry.CompressionMethod = compressionMethod; + AddUpdate(new ZipUpdate(fileName, entry)); + } + + /// + /// Add a file to the archive. + /// + /// The name of the file to add. + /// Argument supplied is null. + public void Add(string fileName) + { + if ( fileName == null ) { + throw new ArgumentNullException("fileName"); + } + + CheckUpdating(); + AddUpdate(new ZipUpdate(fileName, EntryFactory.MakeFileEntry(fileName))); + } + + /// + /// Add a file to the archive. + /// + /// The name of the file to add. + /// The name to use for the on the Zip file created. + /// Argument supplied is null. 
+ public void Add(string fileName, string entryName) + { + if (fileName == null) { + throw new ArgumentNullException("fileName"); + } + + if ( entryName == null ) { + throw new ArgumentNullException("entryName"); + } + + CheckUpdating(); + AddUpdate(new ZipUpdate(fileName, EntryFactory.MakeFileEntry(entryName))); + } + + + /// + /// Add a file entry with data. + /// + /// The source of the data for this entry. + /// The name to give to the entry. + public void Add(IStaticDataSource dataSource, string entryName) + { + if ( dataSource == null ) { + throw new ArgumentNullException("dataSource"); + } + + if ( entryName == null ) { + throw new ArgumentNullException("entryName"); + } + + CheckUpdating(); + AddUpdate(new ZipUpdate(dataSource, EntryFactory.MakeFileEntry(entryName, false))); + } + + /// + /// Add a file entry with data. + /// + /// The source of the data for this entry. + /// The name to give to the entry. + /// The compression method to use. + public void Add(IStaticDataSource dataSource, string entryName, CompressionMethod compressionMethod) + { + if ( dataSource == null ) { + throw new ArgumentNullException("dataSource"); + } + + if ( entryName == null ) { + throw new ArgumentNullException("entryName"); + } + + CheckUpdating(); + + ZipEntry entry = EntryFactory.MakeFileEntry(entryName, false); + entry.CompressionMethod = compressionMethod; + + AddUpdate(new ZipUpdate(dataSource, entry)); + } + + /// + /// Add a file entry with data. + /// + /// The source of the data for this entry. + /// The name to give to the entry. + /// The compression method to use. + /// Ensure Unicode text is used for name and comments for this entry. 
+ public void Add(IStaticDataSource dataSource, string entryName, CompressionMethod compressionMethod, bool useUnicodeText) + { + if (dataSource == null) { + throw new ArgumentNullException("dataSource"); + } + + if ( entryName == null ) { + throw new ArgumentNullException("entryName"); + } + + CheckUpdating(); + + ZipEntry entry = EntryFactory.MakeFileEntry(entryName, false); + entry.IsUnicodeText = useUnicodeText; + entry.CompressionMethod = compressionMethod; + + AddUpdate(new ZipUpdate(dataSource, entry)); + } + + /// + /// Add a that contains no data. + /// + /// The entry to add. + /// This can be used to add directories, volume labels, or empty file entries. + public void Add(ZipEntry entry) + { + if ( entry == null ) { + throw new ArgumentNullException("entry"); + } + + CheckUpdating(); + + if ( (entry.Size != 0) || (entry.CompressedSize != 0) ) { + throw new ZipException("Entry cannot have any data"); + } + + AddUpdate(new ZipUpdate(UpdateCommand.Add, entry)); + } + + /// + /// Add a directory entry to the archive. + /// + /// The directory to add. + public void AddDirectory(string directoryName) + { + if ( directoryName == null ) { + throw new ArgumentNullException("directoryName"); + } + + CheckUpdating(); + + ZipEntry dirEntry = EntryFactory.MakeDirectoryEntry(directoryName); + AddUpdate(new ZipUpdate(UpdateCommand.Add, dirEntry)); + } + + #endregion + + #region Modifying Entries +/* Modify not yet ready for public consumption. + Direct modification of an entry should not overwrite original data before its read. + Safe mode is trivial in this sense. 
+ public void Modify(ZipEntry original, ZipEntry updated) + { + if ( original == null ) { + throw new ArgumentNullException("original"); + } + + if ( updated == null ) { + throw new ArgumentNullException("updated"); + } + + CheckUpdating(); + contentsEdited_ = true; + updates_.Add(new ZipUpdate(original, updated)); + } +*/ + #endregion + + #region Deleting Entries + /// + /// Delete an entry by name + /// + /// The filename to delete + /// True if the entry was found and deleted; false otherwise. + public bool Delete(string fileName) + { + if ( fileName == null ) { + throw new ArgumentNullException("fileName"); + } + + CheckUpdating(); + + bool result = false; + int index = FindExistingUpdate(fileName); + if ( (index >= 0) && (updates_[index] != null) ) { + result = true; + contentsEdited_ = true; + updates_[index] = null; + updateCount_ -= 1; + } + else { + throw new ZipException("Cannot find entry to delete"); + } + return result; + } + + /// + /// Delete a from the archive. + /// + /// The entry to delete. + public void Delete(ZipEntry entry) + { + if ( entry == null ) { + throw new ArgumentNullException("entry"); + } + + CheckUpdating(); + + int index = FindExistingUpdate(entry); + if ( index >= 0 ) { + contentsEdited_ = true; + updates_[index] = null; + updateCount_ -= 1; + } + else { + throw new ZipException("Cannot find entry to delete"); + } + } + + #endregion + + #region Update Support + + #region Writing Values/Headers + void WriteLEShort(int value) + { + baseStream_.WriteByte(( byte )(value & 0xff)); + baseStream_.WriteByte(( byte )((value >> 8) & 0xff)); + } + + /// + /// Write an unsigned short in little endian byte order. + /// + void WriteLEUshort(ushort value) + { + baseStream_.WriteByte(( byte )(value & 0xff)); + baseStream_.WriteByte(( byte )(value >> 8)); + } + + /// + /// Write an int in little endian byte order. 
+ /// + void WriteLEInt(int value) + { + WriteLEShort(value & 0xffff); + WriteLEShort(value >> 16); + } + + /// + /// Write an unsigned int in little endian byte order. + /// + void WriteLEUint(uint value) + { + WriteLEUshort((ushort)(value & 0xffff)); + WriteLEUshort((ushort)(value >> 16)); + } + + /// + /// Write a long in little endian byte order. + /// + void WriteLeLong(long value) + { + WriteLEInt(( int )(value & 0xffffffff)); + WriteLEInt(( int )(value >> 32)); + } + + void WriteLEUlong(ulong value) + { + WriteLEUint(( uint )(value & 0xffffffff)); + WriteLEUint(( uint )(value >> 32)); + } + + void WriteLocalEntryHeader(ZipUpdate update) + { + ZipEntry entry = update.OutEntry; + + // TODO: Local offset will require adjusting for multi-disk zip files. + entry.Offset = baseStream_.Position; + + // TODO: Need to clear any entry flags that dont make sense or throw an exception here. + if (update.Command != UpdateCommand.Copy) { + if (entry.CompressionMethod == CompressionMethod.Deflated) { + if (entry.Size == 0) { + // No need to compress - no data. + entry.CompressedSize = entry.Size; + entry.Crc = 0; + entry.CompressionMethod = CompressionMethod.Stored; + } + } + else if (entry.CompressionMethod == CompressionMethod.Stored) { + entry.Flags &= ~(int)GeneralBitFlags.Descriptor; + } + + if (HaveKeys) { + entry.IsCrypted = true; + if (entry.Crc < 0) { + entry.Flags |= (int)GeneralBitFlags.Descriptor; + } + } + else { + entry.IsCrypted = false; + } + + switch (useZip64_) { + case UseZip64.Dynamic: + if (entry.Size < 0) { + entry.ForceZip64(); + } + break; + + case UseZip64.On: + entry.ForceZip64(); + break; + + case UseZip64.Off: + // Do nothing. The entry itself may be using Zip64 independantly. 
+ break; + } + } + + // Write the local file header + WriteLEInt(ZipConstants.LocalHeaderSignature); + + WriteLEShort(entry.Version); + WriteLEShort(entry.Flags); + + WriteLEShort((byte)entry.CompressionMethod); + WriteLEInt(( int )entry.DosTime); + + if ( !entry.HasCrc ) { + // Note patch address for updating CRC later. + update.CrcPatchOffset = baseStream_.Position; + WriteLEInt(( int )0); + } + else { + WriteLEInt(unchecked(( int )entry.Crc)); + } + + if (entry.LocalHeaderRequiresZip64) { + WriteLEInt(-1); + WriteLEInt(-1); + } + else { + if ( (entry.CompressedSize < 0) || (entry.Size < 0) ) { + update.SizePatchOffset = baseStream_.Position; + } + + WriteLEInt(( int )entry.CompressedSize); + WriteLEInt(( int )entry.Size); + } + + byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name); + + if ( name.Length > 0xFFFF ) { + throw new ZipException("Entry name too long."); + } + + ZipExtraData ed = new ZipExtraData(entry.ExtraData); + + if ( entry.LocalHeaderRequiresZip64 ) { + ed.StartNewEntry(); + + // Local entry header always includes size and compressed size. + // NOTE the order of these fields is reversed when compared to the normal headers! 
+ ed.AddLeLong(entry.Size); + ed.AddLeLong(entry.CompressedSize); + ed.AddNewEntry(1); + } + else { + ed.Delete(1); + } + + entry.ExtraData = ed.GetEntryData(); + + WriteLEShort(name.Length); + WriteLEShort(entry.ExtraData.Length); + + if ( name.Length > 0 ) { + baseStream_.Write(name, 0, name.Length); + } + + if ( entry.LocalHeaderRequiresZip64 ) { + if ( !ed.Find(1) ) { + throw new ZipException("Internal error cannot find extra data"); + } + + update.SizePatchOffset = baseStream_.Position + ed.CurrentReadIndex; + } + + if ( entry.ExtraData.Length > 0 ) { + baseStream_.Write(entry.ExtraData, 0, entry.ExtraData.Length); + } + } + + int WriteCentralDirectoryHeader(ZipEntry entry) + { + if ( entry.CompressedSize < 0 ) { + throw new ZipException("Attempt to write central directory entry with unknown csize"); + } + + if ( entry.Size < 0 ) { + throw new ZipException("Attempt to write central directory entry with unknown size"); + } + + if ( entry.Crc < 0 ) { + throw new ZipException("Attempt to write central directory entry with unknown crc"); + } + + // Write the central file header + WriteLEInt(ZipConstants.CentralHeaderSignature); + + // Version made by + WriteLEShort(ZipConstants.VersionMadeBy); + + // Version required to extract + WriteLEShort(entry.Version); + + WriteLEShort(entry.Flags); + + unchecked { + WriteLEShort((byte)entry.CompressionMethod); + WriteLEInt((int)entry.DosTime); + WriteLEInt((int)entry.Crc); + } + + if ( (entry.IsZip64Forced()) || (entry.CompressedSize >= 0xffffffff) ) { + WriteLEInt(-1); + } + else { + WriteLEInt((int)(entry.CompressedSize & 0xffffffff)); + } + + if ( (entry.IsZip64Forced()) || (entry.Size >= 0xffffffff) ) { + WriteLEInt(-1); + } + else { + WriteLEInt((int)entry.Size); + } + + byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name); + + if ( name.Length > 0xFFFF ) { + throw new ZipException("Entry name is too long."); + } + + WriteLEShort(name.Length); + + // Central header extra data is different to local header 
version so regenerate. + ZipExtraData ed = new ZipExtraData(entry.ExtraData); + + if ( entry.CentralHeaderRequiresZip64 ) { + ed.StartNewEntry(); + + if ( (entry.Size >= 0xffffffff) || (useZip64_ == UseZip64.On) ) + { + ed.AddLeLong(entry.Size); + } + + if ( (entry.CompressedSize >= 0xffffffff) || (useZip64_ == UseZip64.On) ) + { + ed.AddLeLong(entry.CompressedSize); + } + + if ( entry.Offset >= 0xffffffff ) { + ed.AddLeLong(entry.Offset); + } + + // Number of disk on which this file starts isnt supported and is never written here. + ed.AddNewEntry(1); + } + else { + // Should have already be done when local header was added. + ed.Delete(1); + } + + byte[] centralExtraData = ed.GetEntryData(); + + WriteLEShort(centralExtraData.Length); + WriteLEShort(entry.Comment != null ? entry.Comment.Length : 0); + + WriteLEShort(0); // disk number + WriteLEShort(0); // internal file attributes + + // External file attributes... + if ( entry.ExternalFileAttributes != -1 ) { + WriteLEInt(entry.ExternalFileAttributes); + } + else { + if ( entry.IsDirectory ) { + WriteLEUint(16); + } + else { + WriteLEUint(0); + } + } + + if ( entry.Offset >= 0xffffffff ) { + WriteLEUint(0xffffffff); + } + else { + WriteLEUint((uint)(int)entry.Offset); + } + + if ( name.Length > 0 ) { + baseStream_.Write(name, 0, name.Length); + } + + if ( centralExtraData.Length > 0 ) { + baseStream_.Write(centralExtraData, 0, centralExtraData.Length); + } + + byte[] rawComment = (entry.Comment != null) ? 
Encoding.ASCII.GetBytes(entry.Comment) : new byte[0]; + + if ( rawComment.Length > 0 ) { + baseStream_.Write(rawComment, 0, rawComment.Length); + } + + return ZipConstants.CentralHeaderBaseSize + name.Length + centralExtraData.Length + rawComment.Length; + } + #endregion + + void PostUpdateCleanup() + { + updateDataSource_ = null; + updates_ = null; + updateIndex_ = null; + + if (archiveStorage_ != null) + { + archiveStorage_.Dispose(); + archiveStorage_=null; + } + } + + string GetTransformedFileName(string name) + { + INameTransform transform = NameTransform; + return (transform != null) ? + transform.TransformFile(name) : + name; + } + + string GetTransformedDirectoryName(string name) + { + INameTransform transform = NameTransform; + return (transform != null) ? + transform.TransformDirectory(name) : + name; + } + + /// + /// Get a raw memory buffer. + /// + /// Returns a raw memory buffer. + byte[] GetBuffer() + { + if ( copyBuffer_ == null ) { + copyBuffer_ = new byte[bufferSize_]; + } + return copyBuffer_; + } + + void CopyDescriptorBytes(ZipUpdate update, Stream dest, Stream source) + { + int bytesToCopy = GetDescriptorSize(update); + + if ( bytesToCopy > 0 ) { + byte[] buffer = GetBuffer(); + + while ( bytesToCopy > 0 ) { + int readSize = Math.Min(buffer.Length, bytesToCopy); + + int bytesRead = source.Read(buffer, 0, readSize); + if ( bytesRead > 0 ) { + dest.Write(buffer, 0, bytesRead); + bytesToCopy -= bytesRead; + } + else { + throw new ZipException("Unxpected end of stream"); + } + } + } + } + + void CopyBytes(ZipUpdate update, Stream destination, Stream source, + long bytesToCopy, bool updateCrc) + { + if ( destination == source ) { + throw new InvalidOperationException("Destination and source are the same"); + } + + // NOTE: Compressed size is updated elsewhere. 
+ Crc32 crc = new Crc32(); + byte[] buffer = GetBuffer(); + + long targetBytes = bytesToCopy; + long totalBytesRead = 0; + + int bytesRead; + do { + int readSize = buffer.Length; + + if ( bytesToCopy < readSize ) { + readSize = (int)bytesToCopy; + } + + bytesRead = source.Read(buffer, 0, readSize); + if ( bytesRead > 0 ) { + if ( updateCrc ) { + crc.Update(buffer, 0, bytesRead); + } + destination.Write(buffer, 0, bytesRead); + bytesToCopy -= bytesRead; + totalBytesRead += bytesRead; + } + } + while ( (bytesRead > 0) && (bytesToCopy > 0) ); + + if ( totalBytesRead != targetBytes ) { + throw new ZipException(string.Format("Failed to copy bytes expected {0} read {1}", targetBytes, totalBytesRead)); + } + + if ( updateCrc ) { + update.OutEntry.Crc = crc.Value; + } + } + + /// + /// Get the size of the source descriptor for a . + /// + /// The update to get the size for. + /// The descriptor size, zero if there isnt one. + int GetDescriptorSize(ZipUpdate update) + { + int result = 0; + if ( (update.Entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) { + result = ZipConstants.DataDescriptorSize - 4; + if ( update.Entry.LocalHeaderRequiresZip64 ) { + result = ZipConstants.Zip64DataDescriptorSize - 4; + } + } + return result; + } + + void CopyDescriptorBytesDirect(ZipUpdate update, Stream stream, ref long destinationPosition, long sourcePosition) + { + int bytesToCopy = GetDescriptorSize(update); + + while ( bytesToCopy > 0 ) { + int readSize = (int)bytesToCopy; + byte[] buffer = GetBuffer(); + + stream.Position = sourcePosition; + int bytesRead = stream.Read(buffer, 0, readSize); + if ( bytesRead > 0 ) { + stream.Position = destinationPosition; + stream.Write(buffer, 0, bytesRead); + bytesToCopy -= bytesRead; + destinationPosition += bytesRead; + sourcePosition += bytesRead; + } + else { + throw new ZipException("Unxpected end of stream"); + } + } + } + + void CopyEntryDataDirect(ZipUpdate update, Stream stream, bool updateCrc, ref long destinationPosition, ref long 
sourcePosition) + { + long bytesToCopy = update.Entry.CompressedSize; + + // NOTE: Compressed size is updated elsewhere. + Crc32 crc = new Crc32(); + byte[] buffer = GetBuffer(); + + long targetBytes = bytesToCopy; + long totalBytesRead = 0; + + int bytesRead; + do + { + int readSize = buffer.Length; + + if ( bytesToCopy < readSize ) { + readSize = (int)bytesToCopy; + } + + stream.Position = sourcePosition; + bytesRead = stream.Read(buffer, 0, readSize); + if ( bytesRead > 0 ) { + if ( updateCrc ) { + crc.Update(buffer, 0, bytesRead); + } + stream.Position = destinationPosition; + stream.Write(buffer, 0, bytesRead); + + destinationPosition += bytesRead; + sourcePosition += bytesRead; + bytesToCopy -= bytesRead; + totalBytesRead += bytesRead; + } + } + while ( (bytesRead > 0) && (bytesToCopy > 0) ); + + if ( totalBytesRead != targetBytes ) { + throw new ZipException(string.Format("Failed to copy bytes expected {0} read {1}", targetBytes, totalBytesRead)); + } + + if ( updateCrc ) { + update.OutEntry.Crc = crc.Value; + } + } + + int FindExistingUpdate(ZipEntry entry) + { + int result = -1; + string convertedName = GetTransformedFileName(entry.Name); + + if (updateIndex_.ContainsKey(convertedName)) { + result = (int)updateIndex_[convertedName]; + } +/* + // This is slow like the coming of the next ice age but takes less storage and may be useful + // for CF? 
+ for (int index = 0; index < updates_.Count; ++index) + { + ZipUpdate zu = ( ZipUpdate )updates_[index]; + if ( (zu.Entry.ZipFileIndex == entry.ZipFileIndex) && + (string.Compare(convertedName, zu.Entry.Name, true, CultureInfo.InvariantCulture) == 0) ) { + result = index; + break; + } + } + */ + return result; + } + + int FindExistingUpdate(string fileName) + { + int result = -1; + + string convertedName = GetTransformedFileName(fileName); + + if (updateIndex_.ContainsKey(convertedName)) { + result = (int)updateIndex_[convertedName]; + } + +/* + // This is slow like the coming of the next ice age but takes less storage and may be useful + // for CF? + for ( int index = 0; index < updates_.Count; ++index ) { + if ( string.Compare(convertedName, (( ZipUpdate )updates_[index]).Entry.Name, + true, CultureInfo.InvariantCulture) == 0 ) { + result = index; + break; + } + } + */ + + return result; + } + + /// + /// Get an output stream for the specified + /// + /// The entry to get an output stream for. + /// The output stream obtained for the entry. 
+ Stream GetOutputStream(ZipEntry entry) + { + Stream result = baseStream_; + + if ( entry.IsCrypted == true ) { +#if NETCF_1_0 + throw new ZipException("Encryption not supported for Compact Framework 1.0"); +#else + result = CreateAndInitEncryptionStream(result, entry); +#endif + } + + switch ( entry.CompressionMethod ) { + case CompressionMethod.Stored: + result = new UncompressedStream(result); + break; + + case CompressionMethod.Deflated: + DeflaterOutputStream dos = new DeflaterOutputStream(result, new Deflater(9, true)); + dos.IsStreamOwner = false; + result = dos; + break; + + default: + throw new ZipException("Unknown compression method " + entry.CompressionMethod); + } + return result; + } + + void AddEntry(ZipFile workFile, ZipUpdate update) + { + Stream source = null; + + if ( update.Entry.IsFile ) { + source = update.GetSource(); + + if ( source == null ) { + source = updateDataSource_.GetSource(update.Entry, update.Filename); + } + } + + if ( source != null ) { + using ( source ) { + long sourceStreamLength = source.Length; + if ( update.OutEntry.Size < 0 ) { + update.OutEntry.Size = sourceStreamLength; + } + else { + // Check for errant entries. 
+ if ( update.OutEntry.Size != sourceStreamLength ) { + throw new ZipException("Entry size/stream size mismatch"); + } + } + + workFile.WriteLocalEntryHeader(update); + + long dataStart = workFile.baseStream_.Position; + + using ( Stream output = workFile.GetOutputStream(update.OutEntry) ) { + CopyBytes(update, output, source, sourceStreamLength, true); + } + + long dataEnd = workFile.baseStream_.Position; + update.OutEntry.CompressedSize = dataEnd - dataStart; + + if ((update.OutEntry.Flags & (int)GeneralBitFlags.Descriptor) == (int)GeneralBitFlags.Descriptor) + { + ZipHelperStream helper = new ZipHelperStream(workFile.baseStream_); + helper.WriteDataDescriptor(update.OutEntry); + } + } + } + else { + workFile.WriteLocalEntryHeader(update); + update.OutEntry.CompressedSize = 0; + } + + } + + void ModifyEntry(ZipFile workFile, ZipUpdate update) + { + workFile.WriteLocalEntryHeader(update); + long dataStart = workFile.baseStream_.Position; + + // TODO: This is slow if the changes don't effect the data!! 
+ if ( update.Entry.IsFile && (update.Filename != null) ) { + using ( Stream output = workFile.GetOutputStream(update.OutEntry) ) { + using ( Stream source = this.GetInputStream(update.Entry) ) { + CopyBytes(update, output, source, source.Length, true); + } + } + } + + long dataEnd = workFile.baseStream_.Position; + update.Entry.CompressedSize = dataEnd - dataStart; + } + + void CopyEntryDirect(ZipFile workFile, ZipUpdate update, ref long destinationPosition) + { + bool skipOver = false; + if ( update.Entry.Offset == destinationPosition ) { + skipOver = true; + } + + if ( !skipOver ) { + baseStream_.Position = destinationPosition; + workFile.WriteLocalEntryHeader(update); + destinationPosition = baseStream_.Position; + } + + long sourcePosition = 0; + + const int NameLengthOffset = 26; + + // TODO: Add base for SFX friendly handling + long entryDataOffset = update.Entry.Offset + NameLengthOffset; + + baseStream_.Seek(entryDataOffset, SeekOrigin.Begin); + + // Clumsy way of handling retrieving the original name and extra data length for now. + // TODO: Stop re-reading name and data length in CopyEntryDirect. + uint nameLength = ReadLEUshort(); + uint extraLength = ReadLEUshort(); + + sourcePosition = baseStream_.Position + nameLength + extraLength; + + if (skipOver) { + if (update.OffsetBasedSize != -1) + destinationPosition += update.OffsetBasedSize; + else + // TODO: Find out why this calculation comes up 4 bytes short on some entries in ODT (Office Document Text) archives. + // WinZip produces a warning on these entries: + // "caution: value of lrec.csize (compressed size) changed from ..." 
+ destinationPosition += + (sourcePosition - entryDataOffset) + NameLengthOffset + // Header size + update.Entry.CompressedSize + GetDescriptorSize(update); + } + else { + if ( update.Entry.CompressedSize > 0 ) { + CopyEntryDataDirect(update, baseStream_, false, ref destinationPosition, ref sourcePosition ); + } + CopyDescriptorBytesDirect(update, baseStream_, ref destinationPosition, sourcePosition); + } + } + + void CopyEntry(ZipFile workFile, ZipUpdate update) + { + workFile.WriteLocalEntryHeader(update); + + if ( update.Entry.CompressedSize > 0 ) { + const int NameLengthOffset = 26; + + long entryDataOffset = update.Entry.Offset + NameLengthOffset; + + // TODO: This wont work for SFX files! + baseStream_.Seek(entryDataOffset, SeekOrigin.Begin); + + uint nameLength = ReadLEUshort(); + uint extraLength = ReadLEUshort(); + + baseStream_.Seek(nameLength + extraLength, SeekOrigin.Current); + + CopyBytes(update, workFile.baseStream_, baseStream_, update.Entry.CompressedSize, false); + } + CopyDescriptorBytes(update, workFile.baseStream_, baseStream_); + } + + void Reopen(Stream source) + { + if ( source == null ) { + throw new ZipException("Failed to reopen archive - no source"); + } + + isNewArchive_ = false; + baseStream_ = source; + ReadEntries(); + } + + void Reopen() + { + if (Name == null) { + throw new InvalidOperationException("Name is not known cannot Reopen"); + } + + Reopen(File.Open(Name, FileMode.Open, FileAccess.Read, FileShare.Read)); + } + + void UpdateCommentOnly() + { + long baseLength = baseStream_.Length; + + ZipHelperStream updateFile = null; + + if ( archiveStorage_.UpdateMode == FileUpdateMode.Safe ) { + Stream copyStream = archiveStorage_.MakeTemporaryCopy(baseStream_); + updateFile = new ZipHelperStream(copyStream); + updateFile.IsStreamOwner = true; + + baseStream_.Close(); + baseStream_ = null; + } + else { + if (archiveStorage_.UpdateMode == FileUpdateMode.Direct) { + // TODO: archiveStorage wasnt originally intended for this use. 
+ // Need to revisit this to tidy up handling as archive storage currently doesnt + // handle the original stream well. + // The problem is when using an existing zip archive with an in memory archive storage. + // The open stream wont support writing but the memory storage should open the same file not an in memory one. + + // Need to tidy up the archive storage interface and contract basically. + baseStream_ = archiveStorage_.OpenForDirectUpdate(baseStream_); + updateFile = new ZipHelperStream(baseStream_); + } + else { + baseStream_.Close(); + baseStream_ = null; + updateFile = new ZipHelperStream(Name); + } + } + + using ( updateFile ) { + long locatedCentralDirOffset = + updateFile.LocateBlockWithSignature(ZipConstants.EndOfCentralDirectorySignature, + baseLength, ZipConstants.EndOfCentralRecordBaseSize, 0xffff); + if ( locatedCentralDirOffset < 0 ) { + throw new ZipException("Cannot find central directory"); + } + + const int CentralHeaderCommentSizeOffset = 16; + updateFile.Position += CentralHeaderCommentSizeOffset; + + byte[] rawComment = newComment_.RawComment; + + updateFile.WriteLEShort(rawComment.Length); + updateFile.Write(rawComment, 0, rawComment.Length); + updateFile.SetLength(updateFile.Position); + } + + if ( archiveStorage_.UpdateMode == FileUpdateMode.Safe ) { + Reopen(archiveStorage_.ConvertTemporaryToFinal()); + } + else { + ReadEntries(); + } + } + + /// + /// Class used to sort updates. + /// + class UpdateComparer : IComparer + { + /// + /// Compares two objects and returns a value indicating whether one is + /// less than, equal to or greater than the other. + /// + /// First object to compare + /// Second object to compare. + /// Compare result. 
+ public int Compare( + object x, + object y) + { + ZipUpdate zx = x as ZipUpdate; + ZipUpdate zy = y as ZipUpdate; + + int result; + + if (zx == null) { + if (zy == null) { + result = 0; + } + else { + result = -1; + } + } + else if (zy == null) { + result = 1; + } + else { + int xCmdValue = ((zx.Command == UpdateCommand.Copy) || (zx.Command == UpdateCommand.Modify)) ? 0 : 1; + int yCmdValue = ((zy.Command == UpdateCommand.Copy) || (zy.Command == UpdateCommand.Modify)) ? 0 : 1; + + result = xCmdValue - yCmdValue; + if (result == 0) { + long offsetDiff = zx.Entry.Offset - zy.Entry.Offset; + if (offsetDiff < 0) { + result = -1; + } + else if (offsetDiff == 0) { + result = 0; + } + else { + result = 1; + } + } + } + return result; + } + } + + void RunUpdates() + { + long sizeEntries = 0; + long endOfStream = 0; + bool directUpdate = false; + long destinationPosition = 0; // NOT SFX friendly + + ZipFile workFile; + + if ( IsNewArchive ) { + workFile = this; + workFile.baseStream_.Position = 0; + directUpdate = true; + } + else if ( archiveStorage_.UpdateMode == FileUpdateMode.Direct ) { + workFile = this; + workFile.baseStream_.Position = 0; + directUpdate = true; + + // Sort the updates by offset within copies/modifies, then adds. + // This ensures that data required by copies will not be overwritten. + updates_.Sort(new UpdateComparer()); + } + else { + workFile = ZipFile.Create(archiveStorage_.GetTemporaryOutput()); + workFile.UseZip64 = UseZip64; + + if (key != null) { + workFile.key = (byte[])key.Clone(); + } + } + + try { + foreach ( ZipUpdate update in updates_ ) { + if (update != null) { + switch (update.Command) { + case UpdateCommand.Copy: + if (directUpdate) { + CopyEntryDirect(workFile, update, ref destinationPosition); + } + else { + CopyEntry(workFile, update); + } + break; + + case UpdateCommand.Modify: + // TODO: Direct modifying of an entry will take some legwork. 
+ ModifyEntry(workFile, update); + break; + + case UpdateCommand.Add: + if (!IsNewArchive && directUpdate) { + workFile.baseStream_.Position = destinationPosition; + } + + AddEntry(workFile, update); + + if (directUpdate) { + destinationPosition = workFile.baseStream_.Position; + } + break; + } + } + } + + if ( !IsNewArchive && directUpdate ) { + workFile.baseStream_.Position = destinationPosition; + } + + long centralDirOffset = workFile.baseStream_.Position; + + foreach ( ZipUpdate update in updates_ ) { + if (update != null) { + sizeEntries += workFile.WriteCentralDirectoryHeader(update.OutEntry); + } + } + + byte[] theComment = (newComment_ != null) ? newComment_.RawComment : ZipConstants.ConvertToArray(comment_); + using ( ZipHelperStream zhs = new ZipHelperStream(workFile.baseStream_) ) { + zhs.WriteEndOfCentralDirectory(updateCount_, sizeEntries, centralDirOffset, theComment); + } + + endOfStream = workFile.baseStream_.Position; + + // And now patch entries... + foreach ( ZipUpdate update in updates_ ) { + if (update != null) + { + // If the size of the entry is zero leave the crc as 0 as well. + // The calculated crc will be all bits on... 
+ if ((update.CrcPatchOffset > 0) && (update.OutEntry.CompressedSize > 0)) { + workFile.baseStream_.Position = update.CrcPatchOffset; + workFile.WriteLEInt((int)update.OutEntry.Crc); + } + + if (update.SizePatchOffset > 0) { + workFile.baseStream_.Position = update.SizePatchOffset; + if (update.OutEntry.LocalHeaderRequiresZip64) { + workFile.WriteLeLong(update.OutEntry.Size); + workFile.WriteLeLong(update.OutEntry.CompressedSize); + } + else { + workFile.WriteLEInt((int)update.OutEntry.CompressedSize); + workFile.WriteLEInt((int)update.OutEntry.Size); + } + } + } + } + } + catch { + workFile.Close(); + if (!directUpdate && (workFile.Name != null)) { + File.Delete(workFile.Name); + } + throw; + } + + if (directUpdate) { + workFile.baseStream_.SetLength(endOfStream); + workFile.baseStream_.Flush(); + isNewArchive_ = false; + ReadEntries(); + } + else { + baseStream_.Close(); + Reopen(archiveStorage_.ConvertTemporaryToFinal()); + } + } + + void CheckUpdating() + { + if ( updates_ == null ) { + throw new InvalidOperationException("BeginUpdate has not been called"); + } + } + + #endregion + + #region ZipUpdate class + /// + /// Represents a pending update to a Zip file. + /// + class ZipUpdate + { + #region Constructors + public ZipUpdate(string fileName, ZipEntry entry) + { + command_ = UpdateCommand.Add; + entry_ = entry; + filename_ = fileName; + } + + [Obsolete] + public ZipUpdate(string fileName, string entryName, CompressionMethod compressionMethod) + { + command_ = UpdateCommand.Add; + entry_ = new ZipEntry(entryName); + entry_.CompressionMethod = compressionMethod; + filename_ = fileName; + } + + [Obsolete] + public ZipUpdate(string fileName, string entryName) + : this(fileName, entryName, CompressionMethod.Deflated) + { + // Do nothing. 
+ } + + [Obsolete] + public ZipUpdate(IStaticDataSource dataSource, string entryName, CompressionMethod compressionMethod) + { + command_ = UpdateCommand.Add; + entry_ = new ZipEntry(entryName); + entry_.CompressionMethod = compressionMethod; + dataSource_ = dataSource; + } + + public ZipUpdate(IStaticDataSource dataSource, ZipEntry entry) + { + command_ = UpdateCommand.Add; + entry_ = entry; + dataSource_ = dataSource; + } + + public ZipUpdate(ZipEntry original, ZipEntry updated) + { + throw new ZipException("Modify not currently supported"); + /* + command_ = UpdateCommand.Modify; + entry_ = ( ZipEntry )original.Clone(); + outEntry_ = ( ZipEntry )updated.Clone(); + */ + } + + public ZipUpdate(UpdateCommand command, ZipEntry entry) + { + command_ = command; + entry_ = ( ZipEntry )entry.Clone(); + } + + + /// + /// Copy an existing entry. + /// + /// The existing entry to copy. + public ZipUpdate(ZipEntry entry) + : this(UpdateCommand.Copy, entry) + { + // Do nothing. + } + #endregion + + /// + /// Get the for this update. + /// + /// This is the source or original entry. + public ZipEntry Entry + { + get { return entry_; } + } + + /// + /// Get the that will be written to the updated/new file. + /// + public ZipEntry OutEntry + { + get { + if ( outEntry_ == null ) { + outEntry_ = (ZipEntry)entry_.Clone(); + } + + return outEntry_; + } + } + + /// + /// Get the command for this update. + /// + public UpdateCommand Command + { + get { return command_; } + } + + /// + /// Get the filename if any for this update. Null if none exists. + /// + public string Filename + { + get { return filename_; } + } + + /// + /// Get/set the location of the size patch for this update. + /// + public long SizePatchOffset + { + get { return sizePatchOffset_; } + set { sizePatchOffset_ = value; } + } + + /// + /// Get /set the location of the crc patch for this update. 
+ /// + public long CrcPatchOffset + { + get { return crcPatchOffset_; } + set { crcPatchOffset_ = value; } + } + + /// + /// Get/set the size calculated by offset. + /// Specifically, the difference between this and next entry's starting offset. + /// + public long OffsetBasedSize + { + get { return _offsetBasedSize; } + set { _offsetBasedSize = value; } + } + + public Stream GetSource() + { + Stream result = null; + if ( dataSource_ != null ) { + result = dataSource_.GetSource(); + } + + return result; + } + + #region Instance Fields + ZipEntry entry_; + ZipEntry outEntry_; + UpdateCommand command_; + IStaticDataSource dataSource_; + string filename_; + long sizePatchOffset_ = -1; + long crcPatchOffset_ = -1; + long _offsetBasedSize = -1; + #endregion + } + + #endregion + #endregion + + #region Disposing + + #region IDisposable Members + void IDisposable.Dispose() + { + Close(); + } + #endregion + + void DisposeInternal(bool disposing) + { + if ( !isDisposed_ ) { + isDisposed_ = true; + entries_ = new ZipEntry[0]; + + if ( IsStreamOwner && (baseStream_ != null) ) { + lock(baseStream_) { + baseStream_.Close(); + } + } + + PostUpdateCleanup(); + } + } + + /// + /// Releases the unmanaged resources used by the this instance and optionally releases the managed resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + protected virtual void Dispose(bool disposing) + { + DisposeInternal(disposing); + } + + #endregion + + #region Internal routines + #region Reading + /// + /// Read an unsigned short in little endian byte order. + /// + /// Returns the value read. 
+ /// + /// The stream ends prematurely + /// + ushort ReadLEUshort() + { + int data1 = baseStream_.ReadByte(); + + if ( data1 < 0 ) { + throw new EndOfStreamException("End of stream"); + } + + int data2 = baseStream_.ReadByte(); + + if ( data2 < 0 ) { + throw new EndOfStreamException("End of stream"); + } + + + return unchecked((ushort)((ushort)data1 | (ushort)(data2 << 8))); + } + + /// + /// Read a uint in little endian byte order. + /// + /// Returns the value read. + /// + /// An i/o error occurs. + /// + /// + /// The file ends prematurely + /// + uint ReadLEUint() + { + return (uint)(ReadLEUshort() | (ReadLEUshort() << 16)); + } + + ulong ReadLEUlong() + { + return ReadLEUint() | ((ulong)ReadLEUint() << 32); + } + + #endregion + // NOTE this returns the offset of the first byte after the signature. + long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData) + { + using ( ZipHelperStream les = new ZipHelperStream(baseStream_) ) { + return les.LocateBlockWithSignature(signature, endLocation, minimumBlockSize, maximumVariableData); + } + } + + /// + /// Search for and read the central directory of a zip file filling the entries array. + /// + /// + /// An i/o error occurs. + /// + /// + /// The central directory is malformed or cannot be found + /// + void ReadEntries() + { + // Search for the End Of Central Directory. When a zip comment is + // present the directory will start earlier + // + // The search is limited to 64K which is the maximum size of a trailing comment field to aid speed. + // This should be compatible with both SFX and ZIP files but has only been tested for Zip files + // If a SFX file has the Zip data attached as a resource and there are other resources occuring later then + // this could be invalid. + // Could also speed this up by reading memory in larger blocks. 
+ + if (baseStream_.CanSeek == false) { + throw new ZipException("ZipFile stream must be seekable"); + } + + long locatedEndOfCentralDir = LocateBlockWithSignature(ZipConstants.EndOfCentralDirectorySignature, + baseStream_.Length, ZipConstants.EndOfCentralRecordBaseSize, 0xffff); + + if (locatedEndOfCentralDir < 0) { + throw new ZipException("Cannot find central directory"); + } + + // Read end of central directory record + ushort thisDiskNumber = ReadLEUshort(); + ushort startCentralDirDisk = ReadLEUshort(); + ulong entriesForThisDisk = ReadLEUshort(); + ulong entriesForWholeCentralDir = ReadLEUshort(); + ulong centralDirSize = ReadLEUint(); + long offsetOfCentralDir = ReadLEUint(); + uint commentSize = ReadLEUshort(); + + if ( commentSize > 0 ) { + byte[] comment = new byte[commentSize]; + + StreamUtils.ReadFully(baseStream_, comment); + comment_ = ZipConstants.ConvertToString(comment); + } + else { + comment_ = string.Empty; + } + + bool isZip64 = false; + + // Check if zip64 header information is required. 
+ if ( (thisDiskNumber == 0xffff) || + (startCentralDirDisk == 0xffff) || + (entriesForThisDisk == 0xffff) || + (entriesForWholeCentralDir == 0xffff) || + (centralDirSize == 0xffffffff) || + (offsetOfCentralDir == 0xffffffff) ) { + isZip64 = true; + + long offset = LocateBlockWithSignature(ZipConstants.Zip64CentralDirLocatorSignature, locatedEndOfCentralDir, 0, 0x1000); + if ( offset < 0 ) { + throw new ZipException("Cannot find Zip64 locator"); + } + + // number of the disk with the start of the zip64 end of central directory 4 bytes + // relative offset of the zip64 end of central directory record 8 bytes + // total number of disks 4 bytes + ReadLEUint(); // startDisk64 is not currently used + ulong offset64 = ReadLEUlong(); + uint totalDisks = ReadLEUint(); + + baseStream_.Position = (long)offset64; + long sig64 = ReadLEUint(); + + if ( sig64 != ZipConstants.Zip64CentralFileHeaderSignature ) { + throw new ZipException(string.Format("Invalid Zip64 Central directory signature at {0:X}", offset64)); + } + + // NOTE: Record size = SizeOfFixedFields + SizeOfVariableData - 12. + ulong recordSize = ReadLEUlong(); + int versionMadeBy = ReadLEUshort(); + int versionToExtract = ReadLEUshort(); + uint thisDisk = ReadLEUint(); + uint centralDirDisk = ReadLEUint(); + entriesForThisDisk = ReadLEUlong(); + entriesForWholeCentralDir = ReadLEUlong(); + centralDirSize = ReadLEUlong(); + offsetOfCentralDir = (long)ReadLEUlong(); + + // NOTE: zip64 extensible data sector (variable size) is ignored. + } + + entries_ = new ZipEntry[entriesForThisDisk]; + + // SFX/embedded support, find the offset of the first entry vis the start of the stream + // This applies to Zip files that are appended to the end of an SFX stub. + // Or are appended as a resource to an executable. + // Zip files created by some archivers have the offsets altered to reflect the true offsets + // and so dont require any adjustment here... 
+ // TODO: Difficulty with Zip64 and SFX offset handling needs resolution - maths? + if ( !isZip64 && (offsetOfCentralDir < locatedEndOfCentralDir - (4 + (long)centralDirSize)) ) { + offsetOfFirstEntry = locatedEndOfCentralDir - (4 + (long)centralDirSize + offsetOfCentralDir); + if (offsetOfFirstEntry <= 0) { + throw new ZipException("Invalid embedded zip archive"); + } + } + + baseStream_.Seek(offsetOfFirstEntry + offsetOfCentralDir, SeekOrigin.Begin); + + for (ulong i = 0; i < entriesForThisDisk; i++) { + if (ReadLEUint() != ZipConstants.CentralHeaderSignature) { + throw new ZipException("Wrong Central Directory signature"); + } + + int versionMadeBy = ReadLEUshort(); + int versionToExtract = ReadLEUshort(); + int bitFlags = ReadLEUshort(); + int method = ReadLEUshort(); + uint dostime = ReadLEUint(); + uint crc = ReadLEUint(); + long csize = (long)ReadLEUint(); + long size = (long)ReadLEUint(); + int nameLen = ReadLEUshort(); + int extraLen = ReadLEUshort(); + int commentLen = ReadLEUshort(); + + int diskStartNo = ReadLEUshort(); // Not currently used + int internalAttributes = ReadLEUshort(); // Not currently used + + uint externalAttributes = ReadLEUint(); + long offset = ReadLEUint(); + + byte[] buffer = new byte[Math.Max(nameLen, commentLen)]; + + StreamUtils.ReadFully(baseStream_, buffer, 0, nameLen); + string name = ZipConstants.ConvertToStringExt(bitFlags, buffer, nameLen); + + ZipEntry entry = new ZipEntry(name, versionToExtract, versionMadeBy, (CompressionMethod)method); + entry.Crc = crc & 0xffffffffL; + entry.Size = size & 0xffffffffL; + entry.CompressedSize = csize & 0xffffffffL; + entry.Flags = bitFlags; + entry.DosTime = (uint)dostime; + entry.ZipFileIndex = (long)i; + entry.Offset = offset; + entry.ExternalFileAttributes = (int)externalAttributes; + + if ((bitFlags & 8) == 0) { + entry.CryptoCheckValue = (byte)(crc >> 24); + } + else { + entry.CryptoCheckValue = (byte)((dostime >> 8) & 0xff); + } + + if (extraLen > 0) { + byte[] extra = new 
byte[extraLen]; + StreamUtils.ReadFully(baseStream_, extra); + entry.ExtraData = extra; + } + + entry.ProcessExtraData(false); + + if (commentLen > 0) { + StreamUtils.ReadFully(baseStream_, buffer, 0, commentLen); + entry.Comment = ZipConstants.ConvertToStringExt(bitFlags, buffer, commentLen); + } + + entries_[i] = entry; + } + } + + /// + /// Locate the data for a given entry. + /// + /// + /// The start offset of the data. + /// + /// + /// The stream ends prematurely + /// + /// + /// The local header signature is invalid, the entry and central header file name lengths are different + /// or the local and entry compression methods dont match + /// + long LocateEntry(ZipEntry entry) + { + return TestLocalHeader(entry, HeaderTest.Extract); + } + +#if !NETCF_1_0 + Stream CreateAndInitDecryptionStream(Stream baseStream, ZipEntry entry) + { + CryptoStream result = null; + + if ( (entry.Version < ZipConstants.VersionStrongEncryption) + || (entry.Flags & (int)GeneralBitFlags.StrongEncryption) == 0) { + PkzipClassicManaged classicManaged = new PkzipClassicManaged(); + + OnKeysRequired(entry.Name); + if (HaveKeys == false) { + throw new ZipException("No password available for encrypted stream"); + } + + result = new CryptoStream(baseStream, classicManaged.CreateDecryptor(key, null), CryptoStreamMode.Read); + CheckClassicPassword(result, entry); + } + else { +#if !NET_1_1 && !NETCF_2_0 + if (entry.Version == ZipConstants.VERSION_AES) { + // + OnKeysRequired(entry.Name); + if (HaveKeys == false) { + throw new ZipException("No password available for AES encrypted stream"); + } + int saltLen = entry.AESSaltLen; + byte[] saltBytes = new byte[saltLen]; + int saltIn = baseStream.Read(saltBytes, 0, saltLen); + if (saltIn != saltLen) + throw new ZipException("AES Salt expected " + saltLen + " got " + saltIn); + // + byte[] pwdVerifyRead = new byte[2]; + baseStream.Read(pwdVerifyRead, 0, 2); + int blockSize = entry.AESKeySize / 8; // bits to bytes + + ZipAESTransform decryptor = 
new ZipAESTransform(rawPassword_, saltBytes, blockSize, false); + byte[] pwdVerifyCalc = decryptor.PwdVerifier; + if (pwdVerifyCalc[0] != pwdVerifyRead[0] || pwdVerifyCalc[1] != pwdVerifyRead[1]) + throw new Exception("Invalid password for AES"); + result = new ZipAESStream(baseStream, decryptor, CryptoStreamMode.Read); + } + else +#endif + { + throw new ZipException("Decryption method not supported"); + } + } + + return result; + } + + Stream CreateAndInitEncryptionStream(Stream baseStream, ZipEntry entry) + { + CryptoStream result = null; + if ( (entry.Version < ZipConstants.VersionStrongEncryption) + || (entry.Flags & (int)GeneralBitFlags.StrongEncryption) == 0) { + PkzipClassicManaged classicManaged = new PkzipClassicManaged(); + + OnKeysRequired(entry.Name); + if (HaveKeys == false) { + throw new ZipException("No password available for encrypted stream"); + } + + // Closing a CryptoStream will close the base stream as well so wrap it in an UncompressedStream + // which doesnt do this. 
+ result = new CryptoStream(new UncompressedStream(baseStream), + classicManaged.CreateEncryptor(key, null), CryptoStreamMode.Write); + + if ( (entry.Crc < 0) || (entry.Flags & 8) != 0) { + WriteEncryptionHeader(result, entry.DosTime << 16); + } + else { + WriteEncryptionHeader(result, entry.Crc); + } + } + return result; + } + + static void CheckClassicPassword(CryptoStream classicCryptoStream, ZipEntry entry) + { + byte[] cryptbuffer = new byte[ZipConstants.CryptoHeaderSize]; + StreamUtils.ReadFully(classicCryptoStream, cryptbuffer); + if (cryptbuffer[ZipConstants.CryptoHeaderSize - 1] != entry.CryptoCheckValue) { + throw new ZipException("Invalid password"); + } + } +#endif + + static void WriteEncryptionHeader(Stream stream, long crcValue) + { + byte[] cryptBuffer = new byte[ZipConstants.CryptoHeaderSize]; + Random rnd = new Random(); + rnd.NextBytes(cryptBuffer); + cryptBuffer[11] = (byte)(crcValue >> 24); + stream.Write(cryptBuffer, 0, cryptBuffer.Length); + } + + #endregion + + #region Instance Fields + bool isDisposed_; + string name_; + string comment_; + string rawPassword_; + Stream baseStream_; + bool isStreamOwner; + long offsetOfFirstEntry; + ZipEntry[] entries_; + byte[] key; + bool isNewArchive_; + + // Default is dynamic which is not backwards compatible and can cause problems + // with XP's built in compression which cant read Zip64 archives. + // However it does avoid the situation were a large file is added and cannot be completed correctly. + // Hint: Set always ZipEntry size before they are added to an archive and this setting isnt needed. + UseZip64 useZip64_ = UseZip64.Dynamic ; + + #region Zip Update Instance Fields + ArrayList updates_; + long updateCount_; // Count is managed manually as updates_ can contain nulls! 
+ Hashtable updateIndex_; + IArchiveStorage archiveStorage_; + IDynamicDataSource updateDataSource_; + bool contentsEdited_; + int bufferSize_ = DefaultBufferSize; + byte[] copyBuffer_; + ZipString newComment_; + bool commentEdited_; + IEntryFactory updateEntryFactory_ = new ZipEntryFactory(); + #endregion + #endregion + + #region Support Classes + /// + /// Represents a string from a which is stored as an array of bytes. + /// + class ZipString + { + #region Constructors + /// + /// Initialise a with a string. + /// + /// The textual string form. + public ZipString(string comment) + { + comment_ = comment; + isSourceString_ = true; + } + + /// + /// Initialise a using a string in its binary 'raw' form. + /// + /// + public ZipString(byte[] rawString) + { + rawComment_ = rawString; + } + #endregion + + /// + /// Get a value indicating the original source of data for this instance. + /// True if the source was a string; false if the source was binary data. + /// + public bool IsSourceString + { + get { return isSourceString_; } + } + + /// + /// Get the length of the comment when represented as raw bytes. + /// + public int RawLength + { + get { + MakeBytesAvailable(); + return rawComment_.Length; + } + } + + /// + /// Get the comment in its 'raw' form as plain bytes. + /// + public byte[] RawComment + { + get { + MakeBytesAvailable(); + return (byte[])rawComment_.Clone(); + } + } + + /// + /// Reset the comment to its initial state. + /// + public void Reset() + { + if ( isSourceString_ ) { + rawComment_ = null; + } + else { + comment_ = null; + } + } + + void MakeTextAvailable() + { + if ( comment_ == null ) { + comment_ = ZipConstants.ConvertToString(rawComment_); + } + } + + void MakeBytesAvailable() + { + if ( rawComment_ == null ) { + rawComment_ = ZipConstants.ConvertToArray(comment_); + } + } + + /// + /// Implicit conversion of comment to a string. + /// + /// The to convert to a string. + /// The textual equivalent for the input value. 
+ static public implicit operator string(ZipString zipString) + { + zipString.MakeTextAvailable(); + return zipString.comment_; + } + + #region Instance Fields + string comment_; + byte[] rawComment_; + bool isSourceString_; + #endregion + } + + /// + /// An enumerator for Zip entries + /// + class ZipEntryEnumerator : IEnumerator + { + #region Constructors + public ZipEntryEnumerator(ZipEntry[] entries) + { + array = entries; + } + + #endregion + #region IEnumerator Members + public object Current + { + get { + return array[index]; + } + } + + public void Reset() + { + index = -1; + } + + public bool MoveNext() + { + return (++index < array.Length); + } + #endregion + #region Instance Fields + ZipEntry[] array; + int index = -1; + #endregion + } + + /// + /// An is a stream that you can write uncompressed data + /// to and flush, but cannot read, seek or do anything else to. + /// + class UncompressedStream : Stream + { + #region Constructors + public UncompressedStream(Stream baseStream) + { + baseStream_ = baseStream; + } + + #endregion + + /// + /// Close this stream instance. + /// + public override void Close() + { + // Do nothing + } + + /// + /// Gets a value indicating whether the current stream supports reading. + /// + public override bool CanRead + { + get { + return false; + } + } + + /// + /// Write any buffered data to underlying storage. + /// + public override void Flush() + { + baseStream_.Flush(); + } + + /// + /// Gets a value indicating whether the current stream supports writing. + /// + public override bool CanWrite + { + get { + return baseStream_.CanWrite; + } + } + + /// + /// Gets a value indicating whether the current stream supports seeking. + /// + public override bool CanSeek + { + get { + return false; + } + } + + /// + /// Get the length in bytes of the stream. + /// + public override long Length + { + get { + return 0; + } + } + + /// + /// Gets or sets the position within the current stream. 
+ /// + public override long Position + { + get { + return baseStream_.Position; + } + + set + { + } + } + + /// + /// Reads a sequence of bytes from the current stream and advances the position within the stream by the number of bytes read. + /// + /// An array of bytes. When this method returns, the buffer contains the specified byte array with the values between offset and (offset + count - 1) replaced by the bytes read from the current source. + /// The zero-based byte offset in buffer at which to begin storing the data read from the current stream. + /// The maximum number of bytes to be read from the current stream. + /// + /// The total number of bytes read into the buffer. This can be less than the number of bytes requested if that many bytes are not currently available, or zero (0) if the end of the stream has been reached. + /// + /// The sum of offset and count is larger than the buffer length. + /// Methods were called after the stream was closed. + /// The stream does not support reading. + /// buffer is null. + /// An I/O error occurs. + /// offset or count is negative. + public override int Read(byte[] buffer, int offset, int count) + { + return 0; + } + + /// + /// Sets the position within the current stream. + /// + /// A byte offset relative to the origin parameter. + /// A value of type indicating the reference point used to obtain the new position. + /// + /// The new position within the current stream. + /// + /// An I/O error occurs. + /// The stream does not support seeking, such as if the stream is constructed from a pipe or console output. + /// Methods were called after the stream was closed. + public override long Seek(long offset, SeekOrigin origin) + { + return 0; + } + + /// + /// Sets the length of the current stream. + /// + /// The desired length of the current stream in bytes. + /// The stream does not support both writing and seeking, such as if the stream is constructed from a pipe or console output. + /// An I/O error occurs. 
+ /// Methods were called after the stream was closed. + public override void SetLength(long value) + { + } + + /// + /// Writes a sequence of bytes to the current stream and advances the current position within this stream by the number of bytes written. + /// + /// An array of bytes. This method copies count bytes from buffer to the current stream. + /// The zero-based byte offset in buffer at which to begin copying bytes to the current stream. + /// The number of bytes to be written to the current stream. + /// An I/O error occurs. + /// The stream does not support writing. + /// Methods were called after the stream was closed. + /// buffer is null. + /// The sum of offset and count is greater than the buffer length. + /// offset or count is negative. + public override void Write(byte[] buffer, int offset, int count) + { + baseStream_.Write(buffer, offset, count); + } + + #region Instance Fields + Stream baseStream_; + #endregion + } + + /// + /// A is an + /// whose data is only a part or subsection of a file. + /// + class PartialInputStream : Stream + { + #region Constructors + /// + /// Initialise a new instance of the class. + /// + /// The containing the underlying stream to use for IO. + /// The start of the partial data. + /// The length of the partial data. + public PartialInputStream(ZipFile zipFile, long start, long length) + { + start_ = start; + length_ = length; + + // Although this is the only time the zipfile is used + // keeping a reference here prevents premature closure of + // this zip file and thus the baseStream_. + + // Code like this will cause apparently random failures depending + // on the size of the files and when garbage is collected. + // + // ZipFile z = new ZipFile (stream); + // Stream reader = z.GetInputStream(0); + // uses reader here.... + zipFile_ = zipFile; + baseStream_ = zipFile_.baseStream_; + readPos_ = start; + end_ = start + length; + } + #endregion + + /// + /// Read a byte from this stream. 
+ /// + /// Returns the byte read or -1 on end of stream. + public override int ReadByte() + { + if (readPos_ >= end_) { + // -1 is the correct value at end of stream. + return -1; + } + + lock( baseStream_ ) { + baseStream_.Seek(readPos_++, SeekOrigin.Begin); + return baseStream_.ReadByte(); + } + } + + /// + /// Close this partial input stream. + /// + /// + /// The underlying stream is not closed. Close the parent ZipFile class to do that. + /// + public override void Close() + { + // Do nothing at all! + } + + /// + /// Reads a sequence of bytes from the current stream and advances the position within the stream by the number of bytes read. + /// + /// An array of bytes. When this method returns, the buffer contains the specified byte array with the values between offset and (offset + count - 1) replaced by the bytes read from the current source. + /// The zero-based byte offset in buffer at which to begin storing the data read from the current stream. + /// The maximum number of bytes to be read from the current stream. + /// + /// The total number of bytes read into the buffer. This can be less than the number of bytes requested if that many bytes are not currently available, or zero (0) if the end of the stream has been reached. + /// + /// The sum of offset and count is larger than the buffer length. + /// Methods were called after the stream was closed. + /// The stream does not support reading. + /// buffer is null. + /// An I/O error occurs. + /// offset or count is negative. 
+            public override int Read(byte[] buffer, int offset, int count)
+            {
+                lock (baseStream_) {
+                    if (count > end_ - readPos_) {
+                        // Clamp the request to the remaining bytes of this partial view.
+                        count = (int)(end_ - readPos_);
+                        if (count == 0) {
+                            return 0;
+                        }
+                    }
+
+                    // The base stream is shared with the owning ZipFile, so it must be
+                    // repositioned on every read while the lock is held.
+                    baseStream_.Seek(readPos_, SeekOrigin.Begin);
+                    int readCount = baseStream_.Read(buffer, offset, count);
+                    if (readCount > 0) {
+                        readPos_ += readCount;
+                    }
+                    return readCount;
+                }
+            }
+
+            /// <summary>
+            /// Writing is not supported; this stream is read-only.
+            /// </summary>
+            /// <param name="buffer">The buffer containing data to write.</param>
+            /// <param name="offset">The zero-based byte offset in buffer at which to begin copying bytes.</param>
+            /// <param name="count">The number of bytes to write.</param>
+            /// <exception cref="NotSupportedException">Always thrown; the stream does not support writing.</exception>
+            public override void Write(byte[] buffer, int offset, int count)
+            {
+                throw new NotSupportedException();
+            }
+
+            /// <summary>
+            /// Setting the length is not supported; this stream is read-only.
+            /// </summary>
+            /// <param name="value">The desired length of the current stream in bytes.</param>
+            /// <exception cref="NotSupportedException">Always thrown; the stream does not support writing.</exception>
+            public override void SetLength(long value)
+            {
+                throw new NotSupportedException();
+            }
+
+            /// <summary>
+            /// Sets the position within the current stream.
+            /// </summary>
+            /// <param name="offset">A byte offset relative to the origin parameter.</param>
+            /// <param name="origin">A value of type <see cref="SeekOrigin"/> indicating the reference point used to obtain the new position.</param>
+            /// <returns>The new position within the current stream.</returns>
+            /// <exception cref="ArgumentException">An attempt was made to position before the start of the stream.</exception>
+            /// <exception cref="IOException">An attempt was made to position past the end of the stream.</exception>
+            public override long Seek(long offset, SeekOrigin origin)
+            {
+                long newPos = readPos_;
+
+                switch (origin) {
+                    case SeekOrigin.Begin:
+                        newPos = start_ + offset;
+                        break;
+
+                    case SeekOrigin.Current:
+                        newPos = readPos_ + offset;
+                        break;
+
+                    case SeekOrigin.End:
+                        newPos = end_ + offset;
+                        break;
+                }
+
+                if (newPos < start_) {
+                    throw new ArgumentException("Negative position is invalid");
+                }
+
+                // FIX: positioning exactly at end-of-stream is valid for a seekable
+                // Stream (e.g. Seek(0, SeekOrigin.End)); only positions strictly
+                // beyond the end are rejected. The previous '>=' test made seeking
+                // to end-of-stream impossible, and ReadByte/Read already handle
+                // readPos_ == end_ correctly (they return -1 / 0).
+                if (newPos > end_) {
+                    throw new IOException("Cannot seek past end");
+                }
+                readPos_ = newPos;
+                return readPos_;
+            }
+
+            /// <summary>
+            /// Clears all buffers for this stream. This stream buffers nothing, so this is a no-op.
+            /// </summary>
+            public override void Flush()
+            {
+                // Nothing to do.
+            }
+
+            /// <summary>
+            /// Gets or sets the position within the current stream,
+            /// relative to the start of the partial data.
+            /// </summary>
+            /// <value>The current position within the stream.</value>
+            /// <exception cref="ArgumentException">An attempt was made to set a negative position.</exception>
+            /// <exception cref="InvalidOperationException">An attempt was made to position past the end of the stream.</exception>
+            public override long Position {
+                get { return readPos_ - start_; }
+                set {
+                    long newPos = start_ + value;
+
+                    if (newPos < start_) {
+                        throw new ArgumentException("Negative position is invalid");
+                    }
+
+                    // FIX: as with Seek, positioning exactly at end-of-stream is allowed.
+                    if (newPos > end_) {
+                        throw new InvalidOperationException("Cannot seek past end");
+                    }
+                    readPos_ = newPos;
+                }
+            }
+
+            /// <summary>
+            /// Gets the length in bytes of the stream, i.e. the length of the partial data.
+            /// </summary>
+            /// <value>A long value representing the length of the stream in bytes.</value>
+            public override long Length {
+                get { return length_; }
+            }
+
+            /// <summary>
+            /// Gets a value indicating whether the current stream supports writing.
+            /// </summary>
+            /// <value>Always false.</value>
+            public override bool CanWrite {
+                get { return false; }
+            }
+
+            /// <summary>
+            /// Gets a value indicating whether the current stream supports seeking.
+            /// </summary>
+            /// <value>Always true.</value>
+            public override bool CanSeek {
+                get { return true; }
+            }
+
+            /// <summary>
+            /// Gets a value indicating whether the current stream supports reading.
+            /// </summary>
+            /// <value>Always true.</value>
+            public override bool CanRead {
+                get { return true; }
+            }
+
+#if !NET_1_0 && !NET_1_1 && !NETCF_1_0
+            /// <summary>
+            /// Gets a value that determines whether the current stream can time out.
+            /// </summary>
+            /// <value>Delegates to the underlying stream.</value>
+            public override bool CanTimeout {
+                get { return baseStream_.CanTimeout; }
+            }
+#endif
+            #region Instance Fields
+            ZipFile zipFile_;   // Keeps the owning ZipFile alive; see the constructor comment.
+            Stream baseStream_; // Underlying stream, shared with the owning ZipFile.
+            long start_;        // Absolute offset of the first byte of the partial data.
+            long length_;       // Length of the partial data in bytes.
+            long readPos_;      // Absolute position of the next byte to read.
+            long end_;          // Absolute offset one past the last byte of the partial data.
+            #endregion
+        }
+        #endregion
+    }
+
+    #endregion
+
+    #region DataSources
+    /// <summary>
+    /// Provides a static way to obtain a source of data for an entry.
+    /// </summary>
+    public interface IStaticDataSource
+    {
+        /// <summary>
+        /// Get a source of data by creating a new stream.
+        /// </summary>
+        /// <returns>Returns a <see cref="Stream"/> to use for compression input.</returns>
+        /// <remarks>Ideally a new stream is created and opened to achieve this, to avoid locking problems.</remarks>
+        Stream GetSource();
+    }
+
+    /// <summary>
+    /// Represents a source of data that can dynamically provide
+    /// multiple data sources based on the parameters passed.
+    /// </summary>
+    public interface IDynamicDataSource
+    {
+        /// <summary>
+        /// Get a data source.
+        /// </summary>
+        /// <param name="entry">The <see cref="ZipEntry"/> to get a source for.</param>
+        /// <param name="name">The name for data if known.</param>
+        /// <returns>Returns a <see cref="Stream"/> to use for compression input.</returns>
+        /// <remarks>Ideally a new stream is created and opened to achieve this, to avoid locking problems.</remarks>
+        Stream GetSource(ZipEntry entry, string name);
+    }
+
+    /// <summary>
+    /// Default implementation of a <see cref="IStaticDataSource"/> for use with files stored on disk.
+    /// </summary>
+    public class StaticDiskDataSource : IStaticDataSource
+    {
+        /// <summary>
+        /// Initialise a new instance of <see cref="StaticDiskDataSource"/>.
+        /// </summary>
+        /// <param name="fileName">The name of the file to obtain data from.</param>
+        public StaticDiskDataSource(string fileName)
+        {
+            fileName_ = fileName;
+        }
+
+        #region IDataSource Members
+
+        /// <summary>
+        /// Get a <see cref="Stream"/> providing data.
+        /// </summary>
+        /// <returns>Returns a <see cref="Stream"/> providing data.</returns>
+        public Stream GetSource()
+        {
+            // A fresh read-only stream is opened on each call so multiple
+            // consumers do not interfere with each other's stream position.
+            return File.Open(fileName_, FileMode.Open, FileAccess.Read, FileShare.Read);
+        }
+
+        #endregion
+        #region Instance Fields
+        string fileName_;   // Path of the file data is read from.
+        #endregion
+    }
+
+
+    /// <summary>
+    /// Default implementation of <see cref="IDynamicDataSource"/> for files stored on disk.
+    /// </summary>
+    public class DynamicDiskDataSource : IDynamicDataSource
+    {
+        /// <summary>
+        /// Initialise a default instance of <see cref="DynamicDiskDataSource"/>.
+        /// </summary>
+        public DynamicDiskDataSource()
+        {
+        }
+
+        #region IDataSource Members
+        /// <summary>
+        /// Get a <see cref="Stream"/> providing data for an entry.
+        /// </summary>
+        /// <param name="entry">The entry to provide data for.</param>
+        /// <param name="name">The file name for data if known.</param>
+        /// <returns>Returns a stream providing data; or null if not available</returns>
+        public Stream GetSource(ZipEntry entry, string name)
+        {
+            // Only the name is used to locate the data; no name means no source.
+            return (name != null)
+                ? File.Open(name, FileMode.Open, FileAccess.Read, FileShare.Read)
+                : null;
+        }
+
+        #endregion
+    }
+
+    #endregion
+
+    #region Archive Storage
+    /// <summary>
+    /// Defines facilities for data storage when updating Zip Archives.
+    /// </summary>
+    public interface IArchiveStorage
+    {
+        /// <summary>
+        /// Get the <see cref="FileUpdateMode"/> to apply during updates.
+        /// </summary>
+        FileUpdateMode UpdateMode { get; }
+
+        /// <summary>
+        /// Get an empty <see cref="Stream"/> that can be used for temporary output.
+        /// </summary>
+        /// <returns>Returns a temporary output <see cref="Stream"/>.</returns>
+        Stream GetTemporaryOutput();
+
+        /// <summary>
+        /// Convert a temporary output stream to a final stream.
+        /// </summary>
+        /// <returns>The resulting final <see cref="Stream"/>.</returns>
+        Stream ConvertTemporaryToFinal();
+
+        /// <summary>
+        /// Make a temporary copy of the original stream.
+        /// </summary>
+        /// <param name="stream">The <see cref="Stream"/> to copy.</param>
+        /// <returns>Returns a temporary output <see cref="Stream"/> that is a copy of the input.</returns>
+        Stream MakeTemporaryCopy(Stream stream);
+
+        /// <summary>
+        /// Return a stream suitable for performing direct updates on the original source.
+        /// </summary>
+        /// <param name="stream">The current stream.</param>
+        /// <returns>Returns a stream suitable for direct updating.</returns>
+        /// <remarks>This may be the current stream passed.</remarks>
+        Stream OpenForDirectUpdate(Stream stream);
+
+        /// <summary>
+        /// Dispose of this instance.
+        /// </summary>
+        void Dispose();
+    }
+
+    /// <summary>
+    /// An abstract <see cref="IArchiveStorage"/> suitable for extension by inheritance.
+    /// </summary>
+    abstract public class BaseArchiveStorage : IArchiveStorage
+    {
+        #region Constructors
+        /// <summary>
+        /// Initializes a new instance of the <see cref="BaseArchiveStorage"/> class.
+        /// </summary>
+        /// <param name="updateMode">The update mode.</param>
+        protected BaseArchiveStorage(FileUpdateMode updateMode)
+        {
+            updateMode_ = updateMode;
+        }
+        #endregion
+
+        #region IArchiveStorage Members
+
+        /// <summary>
+        /// Gets a temporary output <see cref="Stream"/>.
+        /// </summary>
+        /// <returns>Returns the temporary output stream.</returns>
+        public abstract Stream GetTemporaryOutput();
+
+        /// <summary>
+        /// Converts the temporary <see cref="Stream"/> to its final form.
+        /// </summary>
+        /// <returns>Returns a <see cref="Stream"/> that can be used to read
+        /// the final storage for the archive.</returns>
+        public abstract Stream ConvertTemporaryToFinal();
+
+        /// <summary>
+        /// Make a temporary copy of a <see cref="Stream"/>.
+        /// </summary>
+        /// <param name="stream">The <see cref="Stream"/> to make a copy of.</param>
+        /// <returns>Returns a temporary output <see cref="Stream"/> that is a copy of the input.</returns>
+        public abstract Stream MakeTemporaryCopy(Stream stream);
+
+        /// <summary>
+        /// Return a stream suitable for performing direct updates on the original source.
+        /// </summary>
+        /// <param name="stream">The <see cref="Stream"/> to open for direct update.</param>
+        /// <returns>Returns a stream suitable for direct updating.</returns>
+        public abstract Stream OpenForDirectUpdate(Stream stream);
+
+        /// <summary>
+        /// Disposes this instance.
+        /// </summary>
+        public abstract void Dispose();
+
+        /// <summary>
+        /// Gets the update mode applicable.
+        /// </summary>
+        /// <value>The update mode.</value>
+        public FileUpdateMode UpdateMode
+        {
+            get {
+                return updateMode_;
+            }
+        }
+
+        #endregion
+
+        #region Instance Fields
+        FileUpdateMode updateMode_;   // Fixed at construction time.
+        #endregion
+    }
+
+    /// <summary>
+    /// An <see cref="IArchiveStorage"/> implementation suitable for hard disks.
+    /// </summary>
+    public class DiskArchiveStorage : BaseArchiveStorage
+    {
+        #region Constructors
+        /// <summary>
+        /// Initializes a new instance of the <see cref="DiskArchiveStorage"/> class.
+        /// </summary>
+        /// <param name="file">The file.</param>
+        /// <param name="updateMode">The update mode.</param>
+        /// <exception cref="ZipException">The <see cref="ZipFile"/> is not backed by a named file.</exception>
+        public DiskArchiveStorage(ZipFile file, FileUpdateMode updateMode)
+            : base(updateMode)
+        {
+            // Disk storage can only operate on archives that have a file name.
+            if (file.Name == null) {
+                throw new ZipException("Cant handle non file archives");
+            }
+
+            fileName_ = file.Name;
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="DiskArchiveStorage"/> class,
+        /// defaulting to <see cref="FileUpdateMode.Safe"/>.
+        /// </summary>
+        /// <param name="file">The file.</param>
+        public DiskArchiveStorage(ZipFile file)
+            : this(file, FileUpdateMode.Safe)
+        {
+        }
+        #endregion
+
+        #region IArchiveStorage Members
+
+        /// <summary>
+        /// Gets a temporary output <see cref="Stream"/> for performing updates on.
+        /// </summary>
+        /// <returns>Returns the temporary output stream.</returns>
+        public override Stream GetTemporaryOutput()
+        {
+            if (temporaryName_ != null) {
+                // A temporary name already exists; derive a fresh unique name from it.
+                temporaryName_ = GetTempFileName(temporaryName_, true);
+            }
+            else {
+                // Determine where to place files based on internal strategy.
+                // Currently this is always done in system temp directory.
+                temporaryName_ = Path.GetTempFileName();
+            }
+
+            temporaryStream_ = File.Open(temporaryName_, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
+            return temporaryStream_;
+        }
+
+        /// <summary>
+        /// Converts a temporary <see cref="Stream"/> to its final form.
+        /// </summary>
+        /// <returns>Returns a <see cref="Stream"/> that can be used to read
+        /// the final storage for the archive.</returns>
+ public override Stream ConvertTemporaryToFinal() + { + if ( temporaryStream_ == null ) { + throw new ZipException("No temporary stream has been created"); + } + + Stream result = null; + + string moveTempName = GetTempFileName(fileName_, false); + bool newFileCreated = false; + + try { + temporaryStream_.Close(); + File.Move(fileName_, moveTempName); + File.Move(temporaryName_, fileName_); + newFileCreated = true; + File.Delete(moveTempName); + + result = File.Open(fileName_, FileMode.Open, FileAccess.Read, FileShare.Read); + } + catch(Exception) { + result = null; + + // Try to roll back changes... + if ( !newFileCreated ) { + File.Move(moveTempName, fileName_); + File.Delete(temporaryName_); + } + + throw; + } + + return result; + } + + /// + /// Make a temporary copy of a stream. + /// + /// The to copy. + /// Returns a temporary output that is a copy of the input. + public override Stream MakeTemporaryCopy(Stream stream) + { + stream.Close(); + + temporaryName_ = GetTempFileName(fileName_, true); + File.Copy(fileName_, temporaryName_, true); + + temporaryStream_ = new FileStream(temporaryName_, + FileMode.Open, + FileAccess.ReadWrite); + return temporaryStream_; + } + + /// + /// Return a stream suitable for performing direct updates on the original source. + /// + /// The current stream. + /// Returns a stream suitable for direct updating. + /// If the stream is not null this is used as is. + public override Stream OpenForDirectUpdate(Stream stream) + { + Stream result; + if ((stream == null) || !stream.CanWrite) + { + if (stream != null) { + stream.Close(); + } + + result = new FileStream(fileName_, + FileMode.Open, + FileAccess.ReadWrite); + } + else + { + result = stream; + } + + return result; + } + + /// + /// Disposes this instance. 
+ /// + public override void Dispose() + { + if ( temporaryStream_ != null ) { + temporaryStream_.Close(); + } + } + + #endregion + + #region Internal routines + static string GetTempFileName(string original, bool makeTempFile) + { + string result = null; + + if ( original == null ) { + result = Path.GetTempFileName(); + } + else { + int counter = 0; + int suffixSeed = DateTime.Now.Second; + + while ( result == null ) { + counter += 1; + string newName = string.Format("{0}.{1}{2}.tmp", original, suffixSeed, counter); + if ( !File.Exists(newName) ) { + if ( makeTempFile) { + try { + // Try and create the file. + using ( FileStream stream = File.Create(newName) ) { + } + result = newName; + } + catch { + suffixSeed = DateTime.Now.Second; + } + } + else { + result = newName; + } + } + } + } + return result; + } + #endregion + + #region Instance Fields + Stream temporaryStream_; + string fileName_; + string temporaryName_; + #endregion + } + + /// + /// An implementation suitable for in memory streams. + /// + public class MemoryArchiveStorage : BaseArchiveStorage + { + #region Constructors + /// + /// Initializes a new instance of the class. + /// + public MemoryArchiveStorage() + : base(FileUpdateMode.Direct) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The to use + /// This constructor is for testing as memory streams dont really require safe mode. + public MemoryArchiveStorage(FileUpdateMode updateMode) + : base(updateMode) + { + } + + #endregion + + #region Properties + /// + /// Get the stream returned by if this was in fact called. + /// + public MemoryStream FinalStream + { + get { return finalStream_; } + } + + #endregion + + #region IArchiveStorage Members + + /// + /// Gets the temporary output + /// + /// Returns the temporary output stream. + public override Stream GetTemporaryOutput() + { + temporaryStream_ = new MemoryStream(); + return temporaryStream_; + } + + /// + /// Converts the temporary to its final form. 
+ /// + /// Returns a that can be used to read + /// the final storage for the archive. + public override Stream ConvertTemporaryToFinal() + { + if ( temporaryStream_ == null ) { + throw new ZipException("No temporary stream has been created"); + } + + finalStream_ = new MemoryStream(temporaryStream_.ToArray()); + return finalStream_; + } + + /// + /// Make a temporary copy of the original stream. + /// + /// The to copy. + /// Returns a temporary output that is a copy of the input. + public override Stream MakeTemporaryCopy(Stream stream) + { + temporaryStream_ = new MemoryStream(); + stream.Position = 0; + StreamUtils.Copy(stream, temporaryStream_, new byte[4096]); + return temporaryStream_; + } + + /// + /// Return a stream suitable for performing direct updates on the original source. + /// + /// The original source stream + /// Returns a stream suitable for direct updating. + /// If the passed is not null this is used; + /// otherwise a new is returned. + public override Stream OpenForDirectUpdate(Stream stream) + { + Stream result; + if ((stream == null) || !stream.CanWrite) { + + result = new MemoryStream(); + + if (stream != null) { + stream.Position = 0; + StreamUtils.Copy(stream, result, new byte[4096]); + + stream.Close(); + } + } + else { + result = stream; + } + + return result; + } + + /// + /// Disposes this instance. 
+ /// + public override void Dispose() + { + if ( temporaryStream_ != null ) { + temporaryStream_.Close(); + } + } + + #endregion + + #region Instance Fields + MemoryStream temporaryStream_; + MemoryStream finalStream_; + #endregion + } + + #endregion +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipHelperStream.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipHelperStream.cs new file mode 100644 index 000000000..ed9572f05 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipHelperStream.cs @@ -0,0 +1,623 @@ +// ZipHelperStream.cs +// +// Copyright 2006, 2007 John Reilly +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. 
An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +using System; +using System.IO; +using System.Text; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + + /// + /// Holds data pertinent to a data descriptor. + /// + public class DescriptorData + { + /// + /// Get /set the compressed size of data. + /// + public long CompressedSize + { + get { return compressedSize; } + set { compressedSize = value; } + } + + /// + /// Get / set the uncompressed size of data + /// + public long Size + { + get { return size; } + set { size = value; } + } + + /// + /// Get /set the crc value. + /// + public long Crc + { + get { return crc; } + set { crc = (value & 0xffffffff); } + } + + #region Instance Fields + long size; + long compressedSize; + long crc; + #endregion + } + + class EntryPatchData + { + public long SizePatchOffset + { + get { return sizePatchOffset_; } + set { sizePatchOffset_ = value; } + } + + public long CrcPatchOffset + { + get { return crcPatchOffset_; } + set { crcPatchOffset_ = value; } + } + + #region Instance Fields + long sizePatchOffset_; + long crcPatchOffset_; + #endregion + } + + /// + /// This class assists with writing/reading from Zip files. + /// + internal class ZipHelperStream : Stream + { + #region Constructors + /// + /// Initialise an instance of this class. + /// + /// The name of the file to open. + public ZipHelperStream(string name) + { + stream_ = new FileStream(name, FileMode.Open, FileAccess.ReadWrite); + isOwner_ = true; + } + + /// + /// Initialise a new instance of . + /// + /// The stream to use. + public ZipHelperStream(Stream stream) + { + stream_ = stream; + } + #endregion + + /// + /// Get / set a value indicating wether the the underlying stream is owned or not. 
+ /// + /// If the stream is owned it is closed when this instance is closed. + public bool IsStreamOwner + { + get { return isOwner_; } + set { isOwner_ = value; } + } + + #region Base Stream Methods + public override bool CanRead + { + get { return stream_.CanRead; } + } + + public override bool CanSeek + { + get { return stream_.CanSeek; } + } + +#if !NET_1_0 && !NET_1_1 && !NETCF_1_0 + public override bool CanTimeout + { + get { return stream_.CanTimeout; } + } +#endif + + public override long Length + { + get { return stream_.Length; } + } + + public override long Position + { + get { return stream_.Position; } + set { stream_.Position = value; } + } + + public override bool CanWrite + { + get { return stream_.CanWrite; } + } + + public override void Flush() + { + stream_.Flush(); + } + + public override long Seek(long offset, SeekOrigin origin) + { + return stream_.Seek(offset, origin); + } + + public override void SetLength(long value) + { + stream_.SetLength(value); + } + + public override int Read(byte[] buffer, int offset, int count) + { + return stream_.Read(buffer, offset, count); + } + + public override void Write(byte[] buffer, int offset, int count) + { + stream_.Write(buffer, offset, count); + } + + /// + /// Close the stream. + /// + /// + /// The underlying stream is closed only if is true. + /// + override public void Close() + { + Stream toClose = stream_; + stream_ = null; + if (isOwner_ && (toClose != null)) + { + isOwner_ = false; + toClose.Close(); + } + } + #endregion + + // Write the local file header + // TODO: ZipHelperStream.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOuptutStream usage + void WriteLocalHeader(ZipEntry entry, EntryPatchData patchData) + { + CompressionMethod method = entry.CompressionMethod; + bool headerInfoAvailable = true; // How to get this? 
+ bool patchEntryHeader = false; + + WriteLEInt(ZipConstants.LocalHeaderSignature); + + WriteLEShort(entry.Version); + WriteLEShort(entry.Flags); + WriteLEShort((byte)method); + WriteLEInt((int)entry.DosTime); + + if (headerInfoAvailable == true) { + WriteLEInt((int)entry.Crc); + if ( entry.LocalHeaderRequiresZip64 ) { + WriteLEInt(-1); + WriteLEInt(-1); + } + else { + WriteLEInt(entry.IsCrypted ? (int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize); + WriteLEInt((int)entry.Size); + } + } else { + if (patchData != null) { + patchData.CrcPatchOffset = stream_.Position; + } + WriteLEInt(0); // Crc + + if ( patchData != null ) { + patchData.SizePatchOffset = stream_.Position; + } + + // For local header both sizes appear in Zip64 Extended Information + if ( entry.LocalHeaderRequiresZip64 && patchEntryHeader ) { + WriteLEInt(-1); + WriteLEInt(-1); + } + else { + WriteLEInt(0); // Compressed size + WriteLEInt(0); // Uncompressed size + } + } + + byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name); + + if (name.Length > 0xFFFF) { + throw new ZipException("Entry name too long."); + } + + ZipExtraData ed = new ZipExtraData(entry.ExtraData); + + if (entry.LocalHeaderRequiresZip64 && (headerInfoAvailable || patchEntryHeader)) { + ed.StartNewEntry(); + if (headerInfoAvailable) { + ed.AddLeLong(entry.Size); + ed.AddLeLong(entry.CompressedSize); + } + else { + ed.AddLeLong(-1); + ed.AddLeLong(-1); + } + ed.AddNewEntry(1); + + if ( !ed.Find(1) ) { + throw new ZipException("Internal error cant find extra data"); + } + + if ( patchData != null ) { + patchData.SizePatchOffset = ed.CurrentReadIndex; + } + } + else { + ed.Delete(1); + } + + byte[] extra = ed.GetEntryData(); + + WriteLEShort(name.Length); + WriteLEShort(extra.Length); + + if ( name.Length > 0 ) { + stream_.Write(name, 0, name.Length); + } + + if ( entry.LocalHeaderRequiresZip64 && patchEntryHeader ) { + patchData.SizePatchOffset += stream_.Position; + } + + if ( 
 extra.Length > 0 ) { + stream_.Write(extra, 0, extra.Length); + } + } + + /// + /// Locates a block with the desired . + /// + /// The signature to find. + /// Location, marking the end of block. + /// Minimum size of the block. + /// The maximum variable data. + /// Returns the offset of the first byte after the signature; -1 if not found + public long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData) + { + long pos = endLocation - minimumBlockSize; + if ( pos < 0 ) { + return -1; + } + + long giveUpMarker = Math.Max(pos - maximumVariableData, 0); + + // TODO: This loop could be optimised for speed. + do { + if ( pos < giveUpMarker ) { + return -1; + } + Seek(pos--, SeekOrigin.Begin); + } while ( ReadLEInt() != signature ); + + return Position; + } + + /// + /// Write Zip64 end of central directory records (File header and locator). + /// + /// The number of entries in the central directory. + /// The size of entries in the central directory. + /// The offset of the central directory. 
+ public void WriteZip64EndOfCentralDirectory(long noOfEntries, long sizeEntries, long centralDirOffset) + { + long centralSignatureOffset = stream_.Position; + WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature); + WriteLELong(44); // Size of this record (total size of remaining fields in header or full size - 12) + WriteLEShort(ZipConstants.VersionMadeBy); // Version made by + WriteLEShort(ZipConstants.VersionZip64); // Version to extract + WriteLEInt(0); // Number of this disk + WriteLEInt(0); // number of the disk with the start of the central directory + WriteLELong(noOfEntries); // No of entries on this disk + WriteLELong(noOfEntries); // Total No of entries in central directory + WriteLELong(sizeEntries); // Size of the central directory + WriteLELong(centralDirOffset); // offset of start of central directory + // zip64 extensible data sector not catered for here (variable size) + + // Write the Zip64 end of central directory locator + WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature); + + // no of the disk with the start of the zip64 end of central directory + WriteLEInt(0); + + // relative offset of the zip64 end of central directory record + WriteLELong(centralSignatureOffset); + + // total number of disks + WriteLEInt(1); + } + + /// + /// Write the required records to end the central directory. + /// + /// The number of entries in the directory. + /// The size of the entries in the directory. + /// The start of the central directory. + /// The archive comment. (This can be null). 
+ public void WriteEndOfCentralDirectory(long noOfEntries, long sizeEntries, + long startOfCentralDirectory, byte[] comment) + { + + if ( (noOfEntries >= 0xffff) || + (startOfCentralDirectory >= 0xffffffff) || + (sizeEntries >= 0xffffffff) ) { + WriteZip64EndOfCentralDirectory(noOfEntries, sizeEntries, startOfCentralDirectory); + } + + WriteLEInt(ZipConstants.EndOfCentralDirectorySignature); + + // TODO: ZipFile Multi disk handling not done + WriteLEShort(0); // number of this disk + WriteLEShort(0); // no of disk with start of central dir + + + // Number of entries + if ( noOfEntries >= 0xffff ) { + WriteLEUshort(0xffff); // Zip64 marker + WriteLEUshort(0xffff); + } + else { + WriteLEShort(( short )noOfEntries); // entries in central dir for this disk + WriteLEShort(( short )noOfEntries); // total entries in central directory + } + + // Size of the central directory + if ( sizeEntries >= 0xffffffff ) { + WriteLEUint(0xffffffff); // Zip64 marker + } + else { + WriteLEInt(( int )sizeEntries); + } + + + // offset of start of central directory + if ( startOfCentralDirectory >= 0xffffffff ) { + WriteLEUint(0xffffffff); // Zip64 marker + } + else { + WriteLEInt(( int )startOfCentralDirectory); + } + + int commentLength = (comment != null) ? comment.Length : 0; + + if ( commentLength > 0xffff ) { + throw new ZipException(string.Format("Comment length({0}) is too long can only be 64K", commentLength)); + } + + WriteLEShort(commentLength); + + if ( commentLength > 0 ) { + Write(comment, 0, comment.Length); + } + } + + #region LE value reading/writing + /// + /// Read an unsigned short in little endian byte order. + /// + /// Returns the value read. + /// + /// An i/o error occurs. 
+ /// + /// + /// The file ends prematurely + /// + public int ReadLEShort() + { + int byteValue1 = stream_.ReadByte(); + + if (byteValue1 < 0) { + throw new EndOfStreamException(); + } + + int byteValue2 = stream_.ReadByte(); + if (byteValue2 < 0) { + throw new EndOfStreamException(); + } + + return byteValue1 | (byteValue2 << 8); + } + + /// + /// Read an int in little endian byte order. + /// + /// Returns the value read. + /// + /// An i/o error occurs. + /// + /// + /// The file ends prematurely + /// + public int ReadLEInt() + { + return ReadLEShort() | (ReadLEShort() << 16); + } + + /// + /// Read a long in little endian byte order. + /// + /// The value read. + public long ReadLELong() + { + return (uint)ReadLEInt() | ((long)ReadLEInt() << 32); + } + + /// + /// Write an unsigned short in little endian byte order. + /// + /// The value to write. + public void WriteLEShort(int value) + { + stream_.WriteByte(( byte )(value & 0xff)); + stream_.WriteByte(( byte )((value >> 8) & 0xff)); + } + + /// + /// Write a ushort in little endian byte order. + /// + /// The value to write. + public void WriteLEUshort(ushort value) + { + stream_.WriteByte(( byte )(value & 0xff)); + stream_.WriteByte(( byte )(value >> 8)); + } + + /// + /// Write an int in little endian byte order. + /// + /// The value to write. + public void WriteLEInt(int value) + { + WriteLEShort(value); + WriteLEShort(value >> 16); + } + + /// + /// Write a uint in little endian byte order. + /// + /// The value to write. + public void WriteLEUint(uint value) + { + WriteLEUshort(( ushort )(value & 0xffff)); + WriteLEUshort(( ushort )(value >> 16)); + } + + /// + /// Write a long in little endian byte order. + /// + /// The value to write. + public void WriteLELong(long value) + { + WriteLEInt(( int )value); + WriteLEInt(( int )(value >> 32)); + } + + /// + /// Write a ulong in little endian byte order. + /// + /// The value to write. 
+ public void WriteLEUlong(ulong value) + { + WriteLEUint(( uint )(value & 0xffffffff)); + WriteLEUint(( uint )(value >> 32)); + } + + #endregion + + /// + /// Write a data descriptor. + /// + /// The entry to write a descriptor for. + /// Returns the number of descriptor bytes written. + public int WriteDataDescriptor(ZipEntry entry) + { + if (entry == null) { + throw new ArgumentNullException("entry"); + } + + int result=0; + + // Add data descriptor if flagged as required + if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) + { + // The signature is not PKZIP originally but is now described as optional + // in the PKZIP Appnote documenting the format. + WriteLEInt(ZipConstants.DataDescriptorSignature); + WriteLEInt(unchecked((int)(entry.Crc))); + + result+=8; + + if (entry.LocalHeaderRequiresZip64) + { + WriteLELong(entry.CompressedSize); + WriteLELong(entry.Size); + result+=16; + } + else + { + WriteLEInt((int)entry.CompressedSize); + WriteLEInt((int)entry.Size); + result+=8; + } + } + + return result; + } + + /// + /// Read data descriptor at the end of compressed data. + /// + /// if set to true [zip64]. + /// The data to fill in. + /// Returns the number of bytes read in the descriptor. + public void ReadDataDescriptor(bool zip64, DescriptorData data) + { + int intValue = ReadLEInt(); + + // In theory this may not be a descriptor according to PKZIP appnote. + // In practice it's always there. 
+ if (intValue != ZipConstants.DataDescriptorSignature) { + throw new ZipException("Data descriptor signature not found"); + } + + data.Crc = ReadLEInt(); + + if (zip64) { + data.CompressedSize = ReadLELong(); + data.Size = ReadLELong(); + } + else { + data.CompressedSize = ReadLEInt(); + data.Size = ReadLEInt(); + } + } + + #region Instance Fields + bool isOwner_; + Stream stream_; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipInputStream.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipInputStream.cs new file mode 100644 index 000000000..90848d018 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipInputStream.cs @@ -0,0 +1,675 @@ +// ZipInputStream.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 2010-05-25 Z-1663 Fixed exception when testing local header compressed size of -1 + +using System; +using System.IO; + +using GitHub.ICSharpCode.SharpZipLib.Checksums; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; + +#if !NETCF_1_0 +using GitHub.ICSharpCode.SharpZipLib.Encryption; +#endif + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// This is an InflaterInputStream that reads the files baseInputStream an zip archive + /// one after another. It has a special method to get the zip entry of + /// the next file. The zip entry contains information about the file name + /// size, compressed size, Crc, etc. + /// It includes support for Stored and Deflated entries. + ///
+ ///
Author of the original java version : Jochen Hoenicke + ///
+ /// + /// This sample shows how to read a zip file + /// + /// using System; + /// using System.Text; + /// using System.IO; + /// + /// using GitHub.ICSharpCode.SharpZipLib.Zip; + /// + /// class MainClass + /// { + /// public static void Main(string[] args) + /// { + /// using ( ZipInputStream s = new ZipInputStream(File.OpenRead(args[0]))) { + /// + /// ZipEntry theEntry; + /// const int size = 2048; + /// byte[] data = new byte[2048]; + /// + /// while ((theEntry = s.GetNextEntry()) != null) { + /// if ( entry.IsFile ) { + /// Console.Write("Show contents (y/n) ?"); + /// if (Console.ReadLine() == "y") { + /// while (true) { + /// size = s.Read(data, 0, data.Length); + /// if (size > 0) { + /// Console.Write(new ASCIIEncoding().GetString(data, 0, size)); + /// } else { + /// break; + /// } + /// } + /// } + /// } + /// } + /// } + /// } + /// } + /// + /// + public class ZipInputStream : InflaterInputStream + { + #region Instance Fields + + /// + /// Delegate for reading bytes from a stream. + /// + delegate int ReadDataHandler(byte[] b, int offset, int length); + + /// + /// The current reader this instance. + /// + ReadDataHandler internalReader; + + Crc32 crc = new Crc32(); + ZipEntry entry; + + long size; + int method; + int flags; + string password; + #endregion + + #region Constructors + /// + /// Creates a new Zip input stream, for reading a zip archive. + /// + /// The underlying providing data. + public ZipInputStream(Stream baseInputStream) + : base(baseInputStream, new Inflater(true)) + { + internalReader = new ReadDataHandler(ReadingNotAvailable); + } + + /// + /// Creates a new Zip input stream, for reading a zip archive. + /// + /// The underlying providing data. + /// Size of the buffer. 
+ public ZipInputStream( Stream baseInputStream, int bufferSize ) + : base(baseInputStream, new Inflater(true), bufferSize) + { + internalReader = new ReadDataHandler(ReadingNotAvailable); + } + #endregion + + /// + /// Optional password used for encryption when non-null + /// + /// A password for all encrypted entries in this + public string Password + { + get { + return password; + } + set { + password = value; + } + } + + + /// + /// Gets a value indicating if there is a current entry and it can be decompressed + /// + /// + /// The entry can only be decompressed if the library supports the zip features required to extract it. + /// See the ZipEntry Version property for more details. + /// + public bool CanDecompressEntry { + get { + return (entry != null) && entry.CanDecompress; + } + } + + /// + /// Advances to the next entry in the archive + /// + /// + /// The next entry in the archive or null if there are no more entries. + /// + /// + /// If the previous entry is still open CloseEntry is called. + /// + /// + /// Input stream is closed + /// + /// + /// Password is not set, password is invalid, compression method is invalid, + /// version required to extract is not supported + /// + public ZipEntry GetNextEntry() + { + if (crc == null) { + throw new InvalidOperationException("Closed."); + } + + if (entry != null) { + CloseEntry(); + } + + int header = inputBuffer.ReadLeInt(); + + if (header == ZipConstants.CentralHeaderSignature || + header == ZipConstants.EndOfCentralDirectorySignature || + header == ZipConstants.CentralHeaderDigitalSignature || + header == ZipConstants.ArchiveExtraDataSignature || + header == ZipConstants.Zip64CentralFileHeaderSignature) { + // No more individual entries exist + Close(); + return null; + } + + // -jr- 07-Dec-2003 Ignore spanning temporary signatures if found + // Spanning signature is same as descriptor signature and is untested as yet. 
+ if ( (header == ZipConstants.SpanningTempSignature) || (header == ZipConstants.SpanningSignature) ) { + header = inputBuffer.ReadLeInt(); + } + + if (header != ZipConstants.LocalHeaderSignature) { + throw new ZipException("Wrong Local header signature: 0x" + String.Format("{0:X}", header)); + } + + short versionRequiredToExtract = (short)inputBuffer.ReadLeShort(); + + flags = inputBuffer.ReadLeShort(); + method = inputBuffer.ReadLeShort(); + uint dostime = (uint)inputBuffer.ReadLeInt(); + int crc2 = inputBuffer.ReadLeInt(); + csize = inputBuffer.ReadLeInt(); + size = inputBuffer.ReadLeInt(); + int nameLen = inputBuffer.ReadLeShort(); + int extraLen = inputBuffer.ReadLeShort(); + + bool isCrypted = (flags & 1) == 1; + + byte[] buffer = new byte[nameLen]; + inputBuffer.ReadRawBuffer(buffer); + + string name = ZipConstants.ConvertToStringExt(flags, buffer); + + entry = new ZipEntry(name, versionRequiredToExtract); + entry.Flags = flags; + + entry.CompressionMethod = (CompressionMethod)method; + + if ((flags & 8) == 0) { + entry.Crc = crc2 & 0xFFFFFFFFL; + entry.Size = size & 0xFFFFFFFFL; + entry.CompressedSize = csize & 0xFFFFFFFFL; + + entry.CryptoCheckValue = (byte)((crc2 >> 24) & 0xff); + + } else { + + // This allows for GNU, WinZip and possibly other archives, the PKZIP spec + // says these values are zero under these circumstances. + if (crc2 != 0) { + entry.Crc = crc2 & 0xFFFFFFFFL; + } + + if (size != 0) { + entry.Size = size & 0xFFFFFFFFL; + } + + if (csize != 0) { + entry.CompressedSize = csize & 0xFFFFFFFFL; + } + + entry.CryptoCheckValue = (byte)((dostime >> 8) & 0xff); + } + + entry.DosTime = dostime; + + // If local header requires Zip64 is true then the extended header should contain + // both values. + + // Handle extra data if present. This can set/alter some fields of the entry. 
+ if (extraLen > 0) { + byte[] extra = new byte[extraLen]; + inputBuffer.ReadRawBuffer(extra); + entry.ExtraData = extra; + } + + entry.ProcessExtraData(true); + if ( entry.CompressedSize >= 0 ) { + csize = entry.CompressedSize; + } + + if ( entry.Size >= 0 ) { + size = entry.Size; + } + + if (method == (int)CompressionMethod.Stored && (!isCrypted && csize != size || (isCrypted && csize - ZipConstants.CryptoHeaderSize != size))) { + throw new ZipException("Stored, but compressed != uncompressed"); + } + + // Determine how to handle reading of data if this is attempted. + if (entry.IsCompressionMethodSupported()) { + internalReader = new ReadDataHandler(InitialRead); + } else { + internalReader = new ReadDataHandler(ReadingNotSupported); + } + + return entry; + } + + /// + /// Read data descriptor at the end of compressed data. + /// + void ReadDataDescriptor() + { + if (inputBuffer.ReadLeInt() != ZipConstants.DataDescriptorSignature) { + throw new ZipException("Data descriptor signature not found"); + } + + entry.Crc = inputBuffer.ReadLeInt() & 0xFFFFFFFFL; + + if ( entry.LocalHeaderRequiresZip64 ) { + csize = inputBuffer.ReadLeLong(); + size = inputBuffer.ReadLeLong(); + } else { + csize = inputBuffer.ReadLeInt(); + size = inputBuffer.ReadLeInt(); + } + entry.CompressedSize = csize; + entry.Size = size; + } + + /// + /// Complete cleanup as the final part of closing. + /// + /// True if the crc value should be tested + void CompleteCloseEntry(bool testCrc) + { + StopDecrypting(); + + if ((flags & 8) != 0) { + ReadDataDescriptor(); + } + + size = 0; + + if ( testCrc && + ((crc.Value & 0xFFFFFFFFL) != entry.Crc) && (entry.Crc != -1)) { + throw new ZipException("CRC mismatch"); + } + + crc.Reset(); + + if (method == (int)CompressionMethod.Deflated) { + inf.Reset(); + } + entry = null; + } + + /// + /// Closes the current zip entry and moves to the next one. 
+ /// + /// + /// The stream is closed + /// + /// + /// The Zip stream ends early + /// + public void CloseEntry() + { + if (crc == null) { + throw new InvalidOperationException("Closed"); + } + + if (entry == null) { + return; + } + + if (method == (int)CompressionMethod.Deflated) { + if ((flags & 8) != 0) { + // We don't know how much we must skip, read until end. + byte[] tmp = new byte[4096]; + + // Read will close this entry + while (Read(tmp, 0, tmp.Length) > 0) { + } + return; + } + + csize -= inf.TotalIn; + inputBuffer.Available += inf.RemainingInput; + } + + if ( (inputBuffer.Available > csize) && (csize >= 0) ) { + inputBuffer.Available = (int)((long)inputBuffer.Available - csize); + } else { + csize -= inputBuffer.Available; + inputBuffer.Available = 0; + while (csize != 0) { + long skipped = base.Skip(csize); + + if (skipped <= 0) { + throw new ZipException("Zip archive ends early."); + } + + csize -= skipped; + } + } + + CompleteCloseEntry(false); + } + + /// + /// Returns 1 if there is an entry available + /// Otherwise returns 0. + /// + public override int Available { + get { + return entry != null ? 1 : 0; + } + } + + /// + /// Returns the current size that can be read from the current entry if available + /// + /// Thrown if the entry size is not known. + /// Thrown if no entry is currently available. + public override long Length + { + get { + if ( entry != null ) { + if ( entry.Size >= 0 ) { + return entry.Size; + } else { + throw new ZipException("Length not available for the current entry"); + } + } + else { + throw new InvalidOperationException("No current entry"); + } + } + + } + + /// + /// Reads a byte from the current zip entry. + /// + /// + /// The byte or -1 if end of stream is reached. + /// + public override int ReadByte() + { + byte[] b = new byte[1]; + if (Read(b, 0, 1) <= 0) { + return -1; + } + return b[0] & 0xff; + } + + /// + /// Handle attempts to read by throwing an . + /// + /// The destination array to store data in. 
+ /// The offset at which data read should be stored. + /// The maximum number of bytes to read. + /// Returns the number of bytes actually read. + int ReadingNotAvailable(byte[] destination, int offset, int count) + { + throw new InvalidOperationException("Unable to read from this stream"); + } + + /// + /// Handle attempts to read from this entry by throwing an exception + /// + int ReadingNotSupported(byte[] destination, int offset, int count) + { + throw new ZipException("The compression method for this entry is not supported"); + } + + /// + /// Perform the initial read on an entry which may include + /// reading encryption headers and setting up inflation. + /// + /// The destination to fill with data read. + /// The offset to start reading at. + /// The maximum number of bytes to read. + /// The actual number of bytes read. + int InitialRead(byte[] destination, int offset, int count) + { + if ( !CanDecompressEntry ) { + throw new ZipException("Library cannot extract this entry. Version required is (" + entry.Version.ToString() + ")"); + } + + // Handle encryption if required. + if (entry.IsCrypted) { +#if NETCF_1_0 + throw new ZipException("Encryption not supported for Compact Framework 1.0"); +#else + if (password == null) { + throw new ZipException("No password set."); + } + + // Generate and set crypto transform... 
+ PkzipClassicManaged managed = new PkzipClassicManaged(); + byte[] key = PkzipClassic.GenerateKeys(ZipConstants.ConvertToArray(password)); + + inputBuffer.CryptoTransform = managed.CreateDecryptor(key, null); + + byte[] cryptbuffer = new byte[ZipConstants.CryptoHeaderSize]; + inputBuffer.ReadClearTextBuffer(cryptbuffer, 0, ZipConstants.CryptoHeaderSize); + + if (cryptbuffer[ZipConstants.CryptoHeaderSize - 1] != entry.CryptoCheckValue) { + throw new ZipException("Invalid password"); + } + + if (csize >= ZipConstants.CryptoHeaderSize) { + csize -= ZipConstants.CryptoHeaderSize; + } + else if ( (entry.Flags & (int)GeneralBitFlags.Descriptor) == 0 ) { + throw new ZipException(string.Format("Entry compressed size {0} too small for encryption", csize)); + } +#endif + } else { +#if !NETCF_1_0 + inputBuffer.CryptoTransform = null; +#endif + } + + if ((csize > 0) || ((flags & (int)GeneralBitFlags.Descriptor) != 0)) { + if ((method == (int)CompressionMethod.Deflated) && (inputBuffer.Available > 0)) { + inputBuffer.SetInflaterInput(inf); + } + + internalReader = new ReadDataHandler(BodyRead); + return BodyRead(destination, offset, count); + } + else { + internalReader = new ReadDataHandler(ReadingNotAvailable); + return 0; + } + } + + /// + /// Read a block of bytes from the stream. + /// + /// The destination for the bytes. + /// The index to start storing data. + /// The number of bytes to attempt to read. + /// Returns the number of bytes read. + /// Zero bytes read means end of stream. 
+ public override int Read(byte[] buffer, int offset, int count) + { + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + if ( offset < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "Cannot be negative"); +#endif + } + + if ( count < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "Cannot be negative"); +#endif + } + + if ( (buffer.Length - offset) < count ) { + throw new ArgumentException("Invalid offset/count combination"); + } + + return internalReader(buffer, offset, count); + } + + /// + /// Reads a block of bytes from the current zip entry. + /// + /// + /// The number of bytes read (this may be less than the length requested, even before the end of stream), or 0 on end of stream. + /// + /// + /// An i/o error occured. + /// + /// + /// The deflated stream is corrupted. + /// + /// + /// The stream is not open. 
+ /// + int BodyRead(byte[] buffer, int offset, int count) + { + if ( crc == null ) { + throw new InvalidOperationException("Closed"); + } + + if ( (entry == null) || (count <= 0) ) { + return 0; + } + + if ( offset + count > buffer.Length ) { + throw new ArgumentException("Offset + count exceeds buffer size"); + } + + bool finished = false; + + switch (method) { + case (int)CompressionMethod.Deflated: + count = base.Read(buffer, offset, count); + if (count <= 0) { + if (!inf.IsFinished) { + throw new ZipException("Inflater not finished!"); + } + inputBuffer.Available = inf.RemainingInput; + + // A csize of -1 is from an unpatched local header + if ((flags & 8) == 0 && + (inf.TotalIn != csize && csize != 0xFFFFFFFF && csize != -1 || inf.TotalOut != size)) { + throw new ZipException("Size mismatch: " + csize + ";" + size + " <-> " + inf.TotalIn + ";" + inf.TotalOut); + } + inf.Reset(); + finished = true; + } + break; + + case (int)CompressionMethod.Stored: + if ( (count > csize) && (csize >= 0) ) { + count = (int)csize; + } + + if ( count > 0 ) { + count = inputBuffer.ReadClearTextBuffer(buffer, offset, count); + if (count > 0) { + csize -= count; + size -= count; + } + } + + if (csize == 0) { + finished = true; + } else { + if (count < 0) { + throw new ZipException("EOF in stored block"); + } + } + break; + } + + if (count > 0) { + crc.Update(buffer, offset, count); + } + + if (finished) { + CompleteCloseEntry(true); + } + + return count; + } + + /// + /// Closes the zip input stream + /// + public override void Close() + { + internalReader = new ReadDataHandler(ReadingNotAvailable); + crc = null; + entry = null; + + base.Close(); + } + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipNameTransform.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipNameTransform.cs new file mode 100644 index 000000000..916f4b2e7 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipNameTransform.cs @@ -0,0 +1,269 @@ +// ZipNameTransform.cs +// +// Copyright 2005 John Reilly +// +// This 
program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. +// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + + +using System; +using System.IO; +using System.Text; + +using GitHub.ICSharpCode.SharpZipLib.Core; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// ZipNameTransform transforms names as per the Zip file naming convention. 
+ /// + /// The use of absolute names is supported although its use is not valid + /// according to Zip naming conventions, and should not be used if maximum compatability is desired. + public class ZipNameTransform : INameTransform + { + #region Constructors + /// + /// Initialize a new instance of + /// + public ZipNameTransform() + { + } + + /// + /// Initialize a new instance of + /// + /// The string to trim from the front of paths if found. + public ZipNameTransform(string trimPrefix) + { + TrimPrefix = trimPrefix; + } + #endregion + + /// + /// Static constructor. + /// + static ZipNameTransform() + { + char[] invalidPathChars; +#if NET_1_0 || NET_1_1 || NETCF_1_0 + invalidPathChars = Path.InvalidPathChars; +#else + invalidPathChars = Path.GetInvalidPathChars(); +#endif + int howMany = invalidPathChars.Length + 2; + + InvalidEntryCharsRelaxed = new char[howMany]; + Array.Copy(invalidPathChars, 0, InvalidEntryCharsRelaxed, 0, invalidPathChars.Length); + InvalidEntryCharsRelaxed[howMany - 1] = '*'; + InvalidEntryCharsRelaxed[howMany - 2] = '?'; + + howMany = invalidPathChars.Length + 4; + InvalidEntryChars = new char[howMany]; + Array.Copy(invalidPathChars, 0, InvalidEntryChars, 0, invalidPathChars.Length); + InvalidEntryChars[howMany - 1] = ':'; + InvalidEntryChars[howMany - 2] = '\\'; + InvalidEntryChars[howMany - 3] = '*'; + InvalidEntryChars[howMany - 4] = '?'; + } + + /// + /// Transform a windows directory name according to the Zip file naming conventions. + /// + /// The directory name to transform. + /// The transformed name. + public string TransformDirectory(string name) + { + name = TransformFile(name); + if (name.Length > 0) { + if ( !name.EndsWith("/") ) { + name += "/"; + } + } + else { + throw new ZipException("Cannot have an empty directory name"); + } + return name; + } + + /// + /// Transform a windows file name according to the Zip file naming conventions. + /// + /// The file name to transform. + /// The transformed name. 
+ public string TransformFile(string name) + { + if (name != null) { + string lowerName = name.ToLower(); + if ( (trimPrefix_ != null) && (lowerName.IndexOf(trimPrefix_) == 0) ) { + name = name.Substring(trimPrefix_.Length); + } + + name = name.Replace(@"\", "/"); + name = WindowsPathUtils.DropPathRoot(name); + + // Drop any leading slashes. + while ((name.Length > 0) && (name[0] == '/')) + { + name = name.Remove(0, 1); + } + + // Drop any trailing slashes. + while ((name.Length > 0) && (name[name.Length - 1] == '/')) + { + name = name.Remove(name.Length - 1, 1); + } + + // Convert consecutive // characters to / + int index = name.IndexOf("//"); + while (index >= 0) + { + name = name.Remove(index, 1); + index = name.IndexOf("//"); + } + + name = MakeValidName(name, '_'); + } + else { + name = string.Empty; + } + return name; + } + + /// + /// Get/set the path prefix to be trimmed from paths if present. + /// + /// The prefix is trimmed before any conversion from + /// a windows path is done. + public string TrimPrefix + { + get { return trimPrefix_; } + set { + trimPrefix_ = value; + if (trimPrefix_ != null) { + trimPrefix_ = trimPrefix_.ToLower(); + } + } + } + + /// + /// Force a name to be valid by replacing invalid characters with a fixed value + /// + /// The name to force valid + /// The replacement character to use. + /// Returns a valid name + static string MakeValidName(string name, char replacement) + { + int index = name.IndexOfAny(InvalidEntryChars); + if (index >= 0) { + StringBuilder builder = new StringBuilder(name); + + while (index >= 0 ) { + builder[index] = replacement; + + if (index >= name.Length) { + index = -1; + } + else { + index = name.IndexOfAny(InvalidEntryChars, index + 1); + } + } + name = builder.ToString(); + } + + if (name.Length > 0xffff) { + throw new PathTooLongException(); + } + + return name; + } + + /// + /// Test a name to see if it is a valid name for a zip entry. + /// + /// The name to test. 
+ /// If true checking is relaxed about windows file names and absolute paths. + /// Returns true if the name is a valid zip name; false otherwise. + /// Zip path names are actually in Unix format, and should only contain relative paths. + /// This means that any path stored should not contain a drive or + /// device letter, or a leading slash. All slashes should forward slashes '/'. + /// An empty name is valid for a file where the input comes from standard input. + /// A null name is not considered valid. + /// + public static bool IsValidName(string name, bool relaxed) + { + bool result = (name != null); + + if ( result ) { + if ( relaxed ) { + result = name.IndexOfAny(InvalidEntryCharsRelaxed) < 0; + } + else { + result = + (name.IndexOfAny(InvalidEntryChars) < 0) && + (name.IndexOf('/') != 0); + } + } + + return result; + } + + /// + /// Test a name to see if it is a valid name for a zip entry. + /// + /// The name to test. + /// Returns true if the name is a valid zip name; false otherwise. + /// Zip path names are actually in unix format, + /// and should only contain relative paths if a path is present. + /// This means that the path stored should not contain a drive or + /// device letter, or a leading slash. All slashes should forward slashes '/'. + /// An empty name is valid where the input comes from standard input. + /// A null name is not considered valid. 
+ /// + public static bool IsValidName(string name) + { + bool result = + (name != null) && + (name.IndexOfAny(InvalidEntryChars) < 0) && + (name.IndexOf('/') != 0) + ; + return result; + } + + #region Instance Fields + string trimPrefix_; + #endregion + + #region Class Fields + static readonly char[] InvalidEntryChars; + static readonly char[] InvalidEntryCharsRelaxed; + #endregion + } +} diff --git a/src/GitHub.Api/SharpZipLib/Zip/ZipOutputStream.cs b/src/GitHub.Api/SharpZipLib/Zip/ZipOutputStream.cs new file mode 100644 index 000000000..a5cb5bd20 --- /dev/null +++ b/src/GitHub.Api/SharpZipLib/Zip/ZipOutputStream.cs @@ -0,0 +1,900 @@ +// ZipOutputStream.cs +// +// Copyright (C) 2001 Mike Krueger +// Copyright (C) 2004 John Reilly +// +// This file was translated from java, it was part of the GNU Classpath +// Copyright (C) 2001 Free Software Foundation, Inc. +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// as published by the Free Software Foundation; either version 2 +// of the License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +// +// Linking this library statically or dynamically with other modules is +// making a combined work based on this library. Thus, the terms and +// conditions of the GNU General Public License cover the whole +// combination. 
+// +// As a special exception, the copyright holders of this library give you +// permission to link this library with independent modules to produce an +// executable, regardless of the license terms of these independent +// modules, and to copy and distribute the resulting executable under +// terms of your choice, provided that you also meet, for each linked +// independent module, the terms and conditions of the license of that +// module. An independent module is a module which is not derived from +// or based on this library. If you modify this library, you may extend +// this exception to your version of the library, but you are not +// obligated to do so. If you do not wish to do so, delete this +// exception statement from your version. + +// HISTORY +// 22-12-2009 Z-1649 Added AES support +// 22-02-2010 Z-1648 Zero byte entries would create invalid zip files + +using System; +using System.IO; +using System.Collections; + +using GitHub.ICSharpCode.SharpZipLib.Checksums; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression; +using GitHub.ICSharpCode.SharpZipLib.Zip.Compression.Streams; + +namespace GitHub.ICSharpCode.SharpZipLib.Zip +{ + /// + /// This is a DeflaterOutputStream that writes the files into a zip + /// archive one after another. It has a special method to start a new + /// zip entry. The zip entries contains information about the file name + /// size, compressed size, CRC, etc. + /// + /// It includes support for Stored and Deflated entries. + /// This class is not thread safe. + ///
+ ///
Author of the original java version : Jochen Hoenicke + ///
+ /// This sample shows how to create a zip file + /// + /// using System; + /// using System.IO; + /// + /// using GitHub.ICSharpCode.SharpZipLib.Core; + /// using GitHub.ICSharpCode.SharpZipLib.Zip; + /// + /// class MainClass + /// { + /// public static void Main(string[] args) + /// { + /// string[] filenames = Directory.GetFiles(args[0]); + /// byte[] buffer = new byte[4096]; + /// + /// using ( ZipOutputStream s = new ZipOutputStream(File.Create(args[1])) ) { + /// + /// s.SetLevel(9); // 0 - store only to 9 - means best compression + /// + /// foreach (string file in filenames) { + /// ZipEntry entry = new ZipEntry(file); + /// s.PutNextEntry(entry); + /// + /// using (FileStream fs = File.OpenRead(file)) { + /// StreamUtils.Copy(fs, s, buffer); + /// } + /// } + /// } + /// } + /// } + /// + /// + public class ZipOutputStream : DeflaterOutputStream + { + #region Constructors + /// + /// Creates a new Zip output stream, writing a zip archive. + /// + /// + /// The output stream to which the archive contents are written. + /// + public ZipOutputStream(Stream baseOutputStream) + : base(baseOutputStream, new Deflater(Deflater.DEFAULT_COMPRESSION, true)) + { + } + + /// + /// Creates a new Zip output stream, writing a zip archive. + /// + /// The output stream to which the archive contents are written. + /// Size of the buffer to use. + public ZipOutputStream( Stream baseOutputStream, int bufferSize ) + : base(baseOutputStream, new Deflater(Deflater.DEFAULT_COMPRESSION, true), bufferSize) + { + } + #endregion + + /// + /// Gets a flag value of true if the central header has been added for this archive; false if it has not been added. + /// + /// No further entries can be added once this has been done. + public bool IsFinished + { + get { + return entries == null; + } + } + + /// + /// Set the zip file comment. + /// + /// + /// The comment text for the entire archive. + /// + /// + /// The converted comment is longer than 0xffff bytes. 
+ /// + public void SetComment(string comment) + { + // TODO: Its not yet clear how to handle unicode comments here. + byte[] commentBytes = ZipConstants.ConvertToArray(comment); + if (commentBytes.Length > 0xffff) { + throw new ArgumentOutOfRangeException("comment"); + } + zipComment = commentBytes; + } + + /// + /// Sets the compression level. The new level will be activated + /// immediately. + /// + /// The new compression level (1 to 9). + /// + /// Level specified is not supported. + /// + /// + public void SetLevel(int level) + { + deflater_.SetLevel(level); + defaultCompressionLevel = level; + } + + /// + /// Get the current deflater compression level + /// + /// The current compression level + public int GetLevel() + { + return deflater_.GetLevel(); + } + + /// + /// Get / set a value indicating how Zip64 Extension usage is determined when adding entries. + /// + /// Older archivers may not understand Zip64 extensions. + /// If backwards compatability is an issue be careful when adding entries to an archive. + /// Setting this property to off is workable but less desirable as in those circumstances adding a file + /// larger then 4GB will fail. + public UseZip64 UseZip64 + { + get { return useZip64_; } + set { useZip64_ = value; } + } + + /// + /// Write an unsigned short in little endian byte order. + /// + private void WriteLeShort(int value) + { + unchecked { + baseOutputStream_.WriteByte((byte)(value & 0xff)); + baseOutputStream_.WriteByte((byte)((value >> 8) & 0xff)); + } + } + + /// + /// Write an int in little endian byte order. + /// + private void WriteLeInt(int value) + { + unchecked { + WriteLeShort(value); + WriteLeShort(value >> 16); + } + } + + /// + /// Write an int in little endian byte order. + /// + private void WriteLeLong(long value) + { + unchecked { + WriteLeInt((int)value); + WriteLeInt((int)(value >> 32)); + } + } + + /// + /// Starts a new Zip entry. It automatically closes the previous + /// entry if present. 
+ /// All entry elements bar name are optional, but must be correct if present. + /// If the compression method is stored and the output is not patchable + /// the compression for that entry is automatically changed to deflate level 0 + /// + /// + /// the entry. + /// + /// + /// if entry passed is null. + /// + /// + /// if an I/O error occured. + /// + /// + /// if stream was finished + /// + /// + /// Too many entries in the Zip file
+ /// Entry name is too long
+ /// Finish has already been called
+ ///
+ public void PutNextEntry(ZipEntry entry) + { + if ( entry == null ) { + throw new ArgumentNullException("entry"); + } + + if (entries == null) { + throw new InvalidOperationException("ZipOutputStream was finished"); + } + + if (curEntry != null) { + CloseEntry(); + } + + if (entries.Count == int.MaxValue) { + throw new ZipException("Too many entries for Zip file"); + } + + CompressionMethod method = entry.CompressionMethod; + int compressionLevel = defaultCompressionLevel; + + // Clear flags that the library manages internally + entry.Flags &= (int)GeneralBitFlags.UnicodeText; + patchEntryHeader = false; + + bool headerInfoAvailable; + + // No need to compress - definitely no data. + if (entry.Size == 0) + { + entry.CompressedSize = entry.Size; + entry.Crc = 0; + method = CompressionMethod.Stored; + headerInfoAvailable = true; + } + else + { + headerInfoAvailable = (entry.Size >= 0) && entry.HasCrc; + + // Switch to deflation if storing isnt possible. + if (method == CompressionMethod.Stored) + { + if (!headerInfoAvailable) + { + if (!CanPatchEntries) + { + // Can't patch entries so storing is not possible. + method = CompressionMethod.Deflated; + compressionLevel = 0; + } + } + else // entry.size must be > 0 + { + entry.CompressedSize = entry.Size; + headerInfoAvailable = entry.HasCrc; + } + } + } + + if (headerInfoAvailable == false) { + if (CanPatchEntries == false) { + // Only way to record size and compressed size is to append a data descriptor + // after compressed data. + + // Stored entries of this form have already been converted to deflating. + entry.Flags |= 8; + } else { + patchEntryHeader = true; + } + } + + if (Password != null) { + entry.IsCrypted = true; + if (entry.Crc < 0) { + // Need to append a data descriptor as the crc isnt available for use + // with encryption, the date is used instead. Setting the flag + // indicates this to the decompressor. 
+ entry.Flags |= 8; + } + } + + entry.Offset = offset; + entry.CompressionMethod = (CompressionMethod)method; + + curMethod = method; + sizePatchPos = -1; + + if ( (useZip64_ == UseZip64.On) || ((entry.Size < 0) && (useZip64_ == UseZip64.Dynamic)) ) { + entry.ForceZip64(); + } + + // Write the local file header + WriteLeInt(ZipConstants.LocalHeaderSignature); + + WriteLeShort(entry.Version); + WriteLeShort(entry.Flags); + WriteLeShort((byte)entry.CompressionMethodForHeader); + WriteLeInt((int)entry.DosTime); + + // TODO: Refactor header writing. Its done in several places. + if (headerInfoAvailable == true) { + WriteLeInt((int)entry.Crc); + if ( entry.LocalHeaderRequiresZip64 ) { + WriteLeInt(-1); + WriteLeInt(-1); + } + else { + WriteLeInt(entry.IsCrypted ? (int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize); + WriteLeInt((int)entry.Size); + } + } else { + if (patchEntryHeader) { + crcPatchPos = baseOutputStream_.Position; + } + WriteLeInt(0); // Crc + + if ( patchEntryHeader ) { + sizePatchPos = baseOutputStream_.Position; + } + + // For local header both sizes appear in Zip64 Extended Information + if ( entry.LocalHeaderRequiresZip64 || patchEntryHeader ) { + WriteLeInt(-1); + WriteLeInt(-1); + } + else { + WriteLeInt(0); // Compressed size + WriteLeInt(0); // Uncompressed size + } + } + + byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name); + + if (name.Length > 0xFFFF) { + throw new ZipException("Entry name too long."); + } + + ZipExtraData ed = new ZipExtraData(entry.ExtraData); + + if (entry.LocalHeaderRequiresZip64) { + ed.StartNewEntry(); + if (headerInfoAvailable) { + ed.AddLeLong(entry.Size); + ed.AddLeLong(entry.CompressedSize); + } + else { + ed.AddLeLong(-1); + ed.AddLeLong(-1); + } + ed.AddNewEntry(1); + + if ( !ed.Find(1) ) { + throw new ZipException("Internal error cant find extra data"); + } + + if ( patchEntryHeader ) { + sizePatchPos = ed.CurrentReadIndex; + } + } + else { + ed.Delete(1); + } + 
+#if !NET_1_1 && !NETCF_2_0 + if (entry.AESKeySize > 0) { + AddExtraDataAES(entry, ed); + } +#endif + byte[] extra = ed.GetEntryData(); + + WriteLeShort(name.Length); + WriteLeShort(extra.Length); + + if ( name.Length > 0 ) { + baseOutputStream_.Write(name, 0, name.Length); + } + + if ( entry.LocalHeaderRequiresZip64 && patchEntryHeader ) { + sizePatchPos += baseOutputStream_.Position; + } + + if ( extra.Length > 0 ) { + baseOutputStream_.Write(extra, 0, extra.Length); + } + + offset += ZipConstants.LocalHeaderBaseSize + name.Length + extra.Length; + // Fix offsetOfCentraldir for AES + if (entry.AESKeySize > 0) + offset += entry.AESOverheadSize; + + // Activate the entry. + curEntry = entry; + crc.Reset(); + if (method == CompressionMethod.Deflated) { + deflater_.Reset(); + deflater_.SetLevel(compressionLevel); + } + size = 0; + + if (entry.IsCrypted) { +#if !NET_1_1 && !NETCF_2_0 + if (entry.AESKeySize > 0) { + WriteAESHeader(entry); + } else +#endif + { + if (entry.Crc < 0) { // so testing Zip will says its ok + WriteEncryptionHeader(entry.DosTime << 16); + } else { + WriteEncryptionHeader(entry.Crc); + } + } + } + } + + /// + /// Closes the current entry, updating header and footer information as required + /// + /// + /// An I/O error occurs. + /// + /// + /// No entry is active. 
+ /// + public void CloseEntry() + { + if (curEntry == null) { + throw new InvalidOperationException("No open entry"); + } + + long csize = size; + + // First finish the deflater, if appropriate + if (curMethod == CompressionMethod.Deflated) { + if (size >= 0) { + base.Finish(); + csize = deflater_.TotalOut; + } + else { + deflater_.Reset(); + } + } + + // Write the AES Authentication Code (a hash of the compressed and encrypted data) + if (curEntry.AESKeySize > 0) { + baseOutputStream_.Write(AESAuthCode, 0, 10); + } + + if (curEntry.Size < 0) { + curEntry.Size = size; + } else if (curEntry.Size != size) { + throw new ZipException("size was " + size + ", but I expected " + curEntry.Size); + } + + if (curEntry.CompressedSize < 0) { + curEntry.CompressedSize = csize; + } else if (curEntry.CompressedSize != csize) { + throw new ZipException("compressed size was " + csize + ", but I expected " + curEntry.CompressedSize); + } + + if (curEntry.Crc < 0) { + curEntry.Crc = crc.Value; + } else if (curEntry.Crc != crc.Value) { + throw new ZipException("crc was " + crc.Value + ", but I expected " + curEntry.Crc); + } + + offset += csize; + + if (curEntry.IsCrypted) { + if (curEntry.AESKeySize > 0) { + curEntry.CompressedSize += curEntry.AESOverheadSize; + + } else { + curEntry.CompressedSize += ZipConstants.CryptoHeaderSize; + } + } + + // Patch the header if possible + if (patchEntryHeader) { + patchEntryHeader = false; + + long curPos = baseOutputStream_.Position; + baseOutputStream_.Seek(crcPatchPos, SeekOrigin.Begin); + WriteLeInt((int)curEntry.Crc); + + if ( curEntry.LocalHeaderRequiresZip64 ) { + + if ( sizePatchPos == -1 ) { + throw new ZipException("Entry requires zip64 but this has been turned off"); + } + + baseOutputStream_.Seek(sizePatchPos, SeekOrigin.Begin); + WriteLeLong(curEntry.Size); + WriteLeLong(curEntry.CompressedSize); + } + else { + WriteLeInt((int)curEntry.CompressedSize); + WriteLeInt((int)curEntry.Size); + } + baseOutputStream_.Seek(curPos, 
SeekOrigin.Begin); + } + + // Add data descriptor if flagged as required + if ((curEntry.Flags & 8) != 0) { + WriteLeInt(ZipConstants.DataDescriptorSignature); + WriteLeInt(unchecked((int)curEntry.Crc)); + + if ( curEntry.LocalHeaderRequiresZip64 ) { + WriteLeLong(curEntry.CompressedSize); + WriteLeLong(curEntry.Size); + offset += ZipConstants.Zip64DataDescriptorSize; + } + else { + WriteLeInt((int)curEntry.CompressedSize); + WriteLeInt((int)curEntry.Size); + offset += ZipConstants.DataDescriptorSize; + } + } + + entries.Add(curEntry); + curEntry = null; + } + + void WriteEncryptionHeader(long crcValue) + { + offset += ZipConstants.CryptoHeaderSize; + + InitializePassword(Password); + + byte[] cryptBuffer = new byte[ZipConstants.CryptoHeaderSize]; + Random rnd = new Random(); + rnd.NextBytes(cryptBuffer); + cryptBuffer[11] = (byte)(crcValue >> 24); + + EncryptBlock(cryptBuffer, 0, cryptBuffer.Length); + baseOutputStream_.Write(cryptBuffer, 0, cryptBuffer.Length); + } + +#if !NET_1_1 && !NETCF_2_0 + private static void AddExtraDataAES(ZipEntry entry, ZipExtraData extraData) { + + // Vendor Version: AE-1 IS 1. AE-2 is 2. With AE-2 no CRC is required and 0 is stored. + const int VENDOR_VERSION = 2; + // Vendor ID is the two ASCII characters "AE". 
+ const int VENDOR_ID = 0x4541; //not 6965; + extraData.StartNewEntry(); + // Pack AES extra data field see http://www.winzip.com/aes_info.htm + //extraData.AddLeShort(7); // Data size (currently 7) + extraData.AddLeShort(VENDOR_VERSION); // 2 = AE-2 + extraData.AddLeShort(VENDOR_ID); // "AE" + extraData.AddData(entry.AESEncryptionStrength); // 1 = 128, 2 = 192, 3 = 256 + extraData.AddLeShort((int)entry.CompressionMethod); // The actual compression method used to compress the file + extraData.AddNewEntry(0x9901); + } + + // Replaces WriteEncryptionHeader for AES + // + private void WriteAESHeader(ZipEntry entry) { + byte[] salt; + byte[] pwdVerifier; + InitializeAESPassword(entry, Password, out salt, out pwdVerifier); + // File format for AES: + // Size (bytes) Content + // ------------ ------- + // Variable Salt value + // 2 Password verification value + // Variable Encrypted file data + // 10 Authentication code + // + // Value in the "compressed size" fields of the local file header and the central directory entry + // is the total size of all the items listed above. In other words, it is the total size of the + // salt value, password verification value, encrypted data, and authentication code. + baseOutputStream_.Write(salt, 0, salt.Length); + baseOutputStream_.Write(pwdVerifier, 0, pwdVerifier.Length); + } +#endif + + /// + /// Writes the given buffer to the current entry. + /// + /// The buffer containing data to write. + /// The offset of the first byte to write. + /// The number of bytes to write. + /// Archive size is invalid + /// No entry is active. 
+ public override void Write(byte[] buffer, int offset, int count) + { + if (curEntry == null) { + throw new InvalidOperationException("No open entry."); + } + + if ( buffer == null ) { + throw new ArgumentNullException("buffer"); + } + + if ( offset < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("offset"); +#else + throw new ArgumentOutOfRangeException("offset", "Cannot be negative"); +#endif + } + + if ( count < 0 ) { +#if NETCF_1_0 + throw new ArgumentOutOfRangeException("count"); +#else + throw new ArgumentOutOfRangeException("count", "Cannot be negative"); +#endif + } + + if ( (buffer.Length - offset) < count ) { + throw new ArgumentException("Invalid offset/count combination"); + } + + crc.Update(buffer, offset, count); + size += count; + + switch (curMethod) { + case CompressionMethod.Deflated: + base.Write(buffer, offset, count); + break; + + case CompressionMethod.Stored: + if (Password != null) { + CopyAndEncrypt(buffer, offset, count); + } else { + baseOutputStream_.Write(buffer, offset, count); + } + break; + } + } + + void CopyAndEncrypt(byte[] buffer, int offset, int count) + { + const int CopyBufferSize = 4096; + byte[] localBuffer = new byte[CopyBufferSize]; + while ( count > 0 ) { + int bufferCount = (count < CopyBufferSize) ? count : CopyBufferSize; + + Array.Copy(buffer, offset, localBuffer, 0, bufferCount); + EncryptBlock(localBuffer, 0, bufferCount); + baseOutputStream_.Write(localBuffer, 0, bufferCount); + count -= bufferCount; + offset += bufferCount; + } + } + + /// + /// Finishes the stream. This will write the central directory at the + /// end of the zip file and flush the stream. + /// + /// + /// This is automatically called when the stream is closed. + /// + /// + /// An I/O error occurs. + /// + /// + /// Comment exceeds the maximum length
+ /// Entry name exceeds the maximum length + ///
+ public override void Finish() + { + if (entries == null) { + return; + } + + if (curEntry != null) { + CloseEntry(); + } + + long numEntries = entries.Count; + long sizeEntries = 0; + + foreach (ZipEntry entry in entries) { + WriteLeInt(ZipConstants.CentralHeaderSignature); + WriteLeShort(ZipConstants.VersionMadeBy); + WriteLeShort(entry.Version); + WriteLeShort(entry.Flags); + WriteLeShort((short)entry.CompressionMethodForHeader); + WriteLeInt((int)entry.DosTime); + WriteLeInt((int)entry.Crc); + + if ( entry.IsZip64Forced() || + (entry.CompressedSize >= uint.MaxValue) ) + { + WriteLeInt(-1); + } + else { + WriteLeInt((int)entry.CompressedSize); + } + + if ( entry.IsZip64Forced() || + (entry.Size >= uint.MaxValue) ) + { + WriteLeInt(-1); + } + else { + WriteLeInt((int)entry.Size); + } + + byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name); + + if (name.Length > 0xffff) { + throw new ZipException("Name too long."); + } + + ZipExtraData ed = new ZipExtraData(entry.ExtraData); + + if ( entry.CentralHeaderRequiresZip64 ) { + ed.StartNewEntry(); + if ( entry.IsZip64Forced() || + (entry.Size >= 0xffffffff) ) + { + ed.AddLeLong(entry.Size); + } + + if ( entry.IsZip64Forced() || + (entry.CompressedSize >= 0xffffffff) ) + { + ed.AddLeLong(entry.CompressedSize); + } + + if ( entry.Offset >= 0xffffffff ) + { + ed.AddLeLong(entry.Offset); + } + + ed.AddNewEntry(1); + } + else { + ed.Delete(1); + } + +#if !NET_1_1 && !NETCF_2_0 + if (entry.AESKeySize > 0) { + AddExtraDataAES(entry, ed); + } +#endif + byte[] extra = ed.GetEntryData(); + + byte[] entryComment = + (entry.Comment != null) ? 
+ ZipConstants.ConvertToArray(entry.Flags, entry.Comment) : + new byte[0]; + + if (entryComment.Length > 0xffff) { + throw new ZipException("Comment too long."); + } + + WriteLeShort(name.Length); + WriteLeShort(extra.Length); + WriteLeShort(entryComment.Length); + WriteLeShort(0); // disk number + WriteLeShort(0); // internal file attributes + // external file attributes + + if (entry.ExternalFileAttributes != -1) { + WriteLeInt(entry.ExternalFileAttributes); + } else { + if (entry.IsDirectory) { // mark entry as directory (from nikolam.AT.perfectinfo.com) + WriteLeInt(16); + } else { + WriteLeInt(0); + } + } + + if ( entry.Offset >= uint.MaxValue ) { + WriteLeInt(-1); + } + else { + WriteLeInt((int)entry.Offset); + } + + if ( name.Length > 0 ) { + baseOutputStream_.Write(name, 0, name.Length); + } + + if ( extra.Length > 0 ) { + baseOutputStream_.Write(extra, 0, extra.Length); + } + + if ( entryComment.Length > 0 ) { + baseOutputStream_.Write(entryComment, 0, entryComment.Length); + } + + sizeEntries += ZipConstants.CentralHeaderBaseSize + name.Length + extra.Length + entryComment.Length; + } + + using ( ZipHelperStream zhs = new ZipHelperStream(baseOutputStream_) ) { + zhs.WriteEndOfCentralDirectory(numEntries, sizeEntries, offset, zipComment); + } + + entries = null; + } + + #region Instance Fields + /// + /// The entries for the archive. + /// + ArrayList entries = new ArrayList(); + + /// + /// Used to track the crc of data added to entries. + /// + Crc32 crc = new Crc32(); + + /// + /// The current entry being added. + /// + ZipEntry curEntry; + + int defaultCompressionLevel = Deflater.DEFAULT_COMPRESSION; + + CompressionMethod curMethod = CompressionMethod.Deflated; + + /// + /// Used to track the size of data for an entry during writing. + /// + long size; + + /// + /// Offset to be recorded for each entry in the central header. + /// + long offset; + + /// + /// Comment for the entire archive recorded in central header. 
+ /// + byte[] zipComment = new byte[0]; + + /// + /// Flag indicating that header patching is required for the current entry. + /// + bool patchEntryHeader; + + /// + /// Position to patch crc + /// + long crcPatchPos = -1; + + /// + /// Position to patch size. + /// + long sizePatchPos = -1; + + // Default is dynamic which is not backwards compatible and can cause problems + // with XP's built in compression which cant read Zip64 archives. + // However it does avoid the situation were a large file is added and cannot be completed correctly. + // NOTE: Setting the size for entries before they are added is the best solution! + UseZip64 useZip64_ = UseZip64.Dynamic; + #endregion + } +} diff --git a/src/GitHub.Api/Tasks/ActionTask.cs b/src/GitHub.Api/Tasks/ActionTask.cs index 344704561..d42a9d1ff 100644 --- a/src/GitHub.Api/Tasks/ActionTask.cs +++ b/src/GitHub.Api/Tasks/ActionTask.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { - class TaskQueue : TPLTask + public class TaskQueue : TPLTask { private TaskCompletionSource aggregateTask = new TaskCompletionSource(); private readonly List queuedTasks = new List(); @@ -31,15 +31,31 @@ public ITask Queue(ITask task) public override void RunSynchronously() { - foreach (var task in queuedTasks) - task.Start(); + if (queuedTasks.Any()) + { + foreach (var task in queuedTasks) + task.Start(); + } + else + { + aggregateTask.TrySetResult(true); + } + base.RunSynchronously(); } protected override void Schedule() { - foreach (var task in queuedTasks) - task.Start(); + if (queuedTasks.Any()) + { + foreach (var task in queuedTasks) + task.Start(); + } + else + { + aggregateTask.TrySetResult(true); + } + base.Schedule(); } @@ -69,7 +85,7 @@ private void TaskFinished(bool success, Exception ex) } } - class TaskQueue : TPLTask> + public class TaskQueue : TPLTask> { private TaskCompletionSource> aggregateTask = new TaskCompletionSource>(); private readonly List> queuedTasks = new List>(); @@ -115,15 +131,31 @@ public ITask Queue(ITask task) 
public override List RunSynchronously() { - foreach (var task in queuedTasks) - task.Start(); + if (queuedTasks.Any()) + { + foreach (var task in queuedTasks) + task.Start(); + } + else + { + aggregateTask.TrySetResult(new List()); + } + return base.RunSynchronously(); } protected override void Schedule() { - foreach (var task in queuedTasks) - task.Start(); + if (queuedTasks.Any()) + { + foreach (var task in queuedTasks) + task.Start(); + } + else + { + aggregateTask.TrySetResult(new List()); + } + base.Schedule(); } @@ -158,7 +190,7 @@ private void TaskFinished(TTaskResult result, bool success, Exception ex) } } - class TPLTask : TaskBase + public class TPLTask : TaskBase { private Task task; @@ -203,7 +235,7 @@ protected override void Run(bool success) } } - class TPLTask : TaskBase + public class TPLTask : TaskBase { private Task task; @@ -248,7 +280,7 @@ protected override T RunWithReturn(bool success) } } - class ActionTask : TaskBase + public class ActionTask : TaskBase { protected Action Callback { get; } protected Action CallbackWithException { get; } @@ -297,7 +329,7 @@ protected override void Run(bool success) } } - class ActionTask : TaskBase + public class ActionTask : TaskBase { private readonly Func getPreviousResult; @@ -383,7 +415,7 @@ protected virtual void Run(bool success, T previousResult) public T PreviousResult { get; set; } = default(T); } - class FuncTask : TaskBase + public class FuncTask : TaskBase { protected Func Callback { get; } protected Func CallbackWithException { get; } @@ -436,7 +468,7 @@ protected override T RunWithReturn(bool success) } } - class FuncTask : TaskBase + public class FuncTask : TaskBase { protected Func Callback { get; } protected Func CallbackWithException { get; } @@ -481,7 +513,7 @@ protected override TResult RunWithData(bool success, T previousResult) } } - class FuncListTask : DataTaskBase> + public class FuncListTask : DataTaskBase> { protected Func> Callback { get; } protected Func, List> CallbackWithSelf { 
get; } @@ -541,7 +573,7 @@ protected override List RunWithReturn(bool success) } } - class FuncListTask : DataTaskBase> + public class FuncListTask : DataTaskBase> { protected Func> Callback { get; } protected Func> CallbackWithException { get; } diff --git a/src/GitHub.Api/Managers/Downloader.cs b/src/GitHub.Api/Tasks/Downloader.cs similarity index 100% rename from src/GitHub.Api/Managers/Downloader.cs rename to src/GitHub.Api/Tasks/Downloader.cs diff --git a/src/GitHub.Api/Tasks/OctorunTask.cs b/src/GitHub.Api/Tasks/OctorunTask.cs index f64fcc0cc..06c3c6c0c 100644 --- a/src/GitHub.Api/Tasks/OctorunTask.cs +++ b/src/GitHub.Api/Tasks/OctorunTask.cs @@ -54,28 +54,24 @@ class OctorunTask : ProcessTask { private readonly string clientId; private readonly string clientSecret; - private readonly string user; private readonly string userToken; private readonly NPath pathToNodeJs; private readonly NPath pathToOctorunJs; private readonly string arguments; - public OctorunTask(CancellationToken token, NPath pathToNodeJs, NPath pathToOctorunJs, string arguments, - string clientId = null, - string clientSecret = null, - string user = null, + public OctorunTask(CancellationToken token, IEnvironment environment, + string arguments, string userToken = null, IOutputProcessor processor = null) : base(token, processor ?? 
new OctorunResultOutputProcessor()) { - this.clientId = clientId; - this.clientSecret = clientSecret; - this.user = user; - this.userToken = userToken; - this.pathToNodeJs = pathToNodeJs; - this.pathToOctorunJs = pathToOctorunJs; + this.clientId = ApplicationInfo.ClientId; + this.clientSecret = ApplicationInfo.ClientSecret; + this.pathToNodeJs = environment.NodeJsExecutablePath; + this.pathToOctorunJs = environment.OctorunScriptPath; this.arguments = $"\"{pathToOctorunJs}\" {arguments}"; + this.userToken = userToken; } public override void Configure(ProcessStartInfo psi) @@ -85,21 +81,8 @@ public override void Configure(ProcessStartInfo psi) psi.WorkingDirectory = pathToOctorunJs.Parent.Parent.Parent; psi.EnvironmentVariables.Add("OCTOKIT_USER_AGENT", $"{ApplicationInfo.ApplicationSafeName}/{ApplicationInfo.Version}"); - - if (clientId != null) - { - psi.EnvironmentVariables.Add("OCTOKIT_CLIENT_ID", clientId); - } - - if (clientSecret != null) - { - psi.EnvironmentVariables.Add("OCTOKIT_CLIENT_SECRET", clientSecret); - } - - if (user != null) - { - psi.EnvironmentVariables.Add("OCTORUN_USER", user); - } + psi.EnvironmentVariables.Add("OCTOKIT_CLIENT_ID", clientId); + psi.EnvironmentVariables.Add("OCTOKIT_CLIENT_SECRET", clientSecret); if (userToken != null) { diff --git a/src/GitHub.Api/Tasks/ProcessTask.cs b/src/GitHub.Api/Tasks/ProcessTask.cs index c6802c4bc..66cb5a3f2 100644 --- a/src/GitHub.Api/Tasks/ProcessTask.cs +++ b/src/GitHub.Api/Tasks/ProcessTask.cs @@ -9,7 +9,7 @@ namespace GitHub.Unity { - static class ProcessTaskExtensions + public static class ProcessTaskExtensions { public static T Configure(this T task, IProcessManager processManager, bool withInput) where T : IProcess @@ -117,15 +117,22 @@ public void Run() { Process.OutputDataReceived += (s, e) => { - lastOutput = DateTimeOffset.UtcNow; - gotOutput.Set(); - if (e.Data != null) + try { - var line = Encoding.UTF8.GetString(Encoding.UTF8.GetBytes(e.Data)); - 
outputProcessor.LineReceived(line.TrimEnd('\r','\n')); + lastOutput = DateTimeOffset.UtcNow; + gotOutput.Set(); + if (e.Data != null) + { + var line = Encoding.UTF8.GetString(Encoding.UTF8.GetBytes(e.Data)); + outputProcessor.LineReceived(line.TrimEnd('\r', '\n')); + } + else + outputProcessor.LineReceived(null); + } + catch (Exception ex) + { + Logger.Error(ex); } - else - outputProcessor.LineReceived(null); }; } @@ -256,7 +263,7 @@ private bool WaitForExit(int milliseconds) ///
/// The type of the results. If it's a List<> or similar, then specify the full List<> type here and the inner type of the List in /// If is a list or similar, then specify its inner type here - class ProcessTask : TaskBase, IProcessTask + public class ProcessTask : TaskBase, IProcessTask { private IOutputProcessor outputProcessor; private ProcessWrapper wrapper; @@ -395,7 +402,7 @@ public override string ToString() public virtual string ProcessArguments { get; } } - class ProcessTaskWithListOutput : DataTaskBase>, IProcessTask> + public class ProcessTaskWithListOutput : DataTaskBase>, IProcessTask> { private IOutputProcessor> outputProcessor; private Exception thrownException = null; @@ -543,7 +550,7 @@ public FirstNonNullLineProcessTask(CancellationToken token, string arguments) public override string ProcessArguments => arguments; } - class SimpleProcessTask : ProcessTask + public class SimpleProcessTask : ProcessTask { private readonly NPath? fullPathToExecutable; private readonly string arguments; @@ -565,7 +572,7 @@ public SimpleProcessTask(CancellationToken token, string arguments, IOutputProce public override string ProcessArguments => arguments; } - class SimpleListProcessTask : ProcessTaskWithListOutput + public class SimpleListProcessTask : ProcessTaskWithListOutput { private readonly NPath fullPathToExecutable; private readonly string arguments; diff --git a/src/GitHub.Api/Tasks/TaskBase.cs b/src/GitHub.Api/Tasks/TaskBase.cs index 99d169152..768c011d5 100644 --- a/src/GitHub.Api/Tasks/TaskBase.cs +++ b/src/GitHub.Api/Tasks/TaskBase.cs @@ -547,7 +547,7 @@ public override string ToString() public virtual string Message { get; set; } } - abstract class TaskBase : TaskBase, ITask + public abstract class TaskBase : TaskBase, ITask { private event Action finallyHandler; @@ -723,7 +723,7 @@ protected override void CallFinallyHandler() public TResult Result { get { return result; } } } - abstract class TaskBase : TaskBase + public abstract class TaskBase : 
TaskBase { private readonly Func getPreviousResult; @@ -770,7 +770,7 @@ protected virtual TResult RunWithData(bool success, T previousResult) public T PreviousResult { get; set; } = default(T); } - abstract class DataTaskBase : TaskBase, ITask + public abstract class DataTaskBase : TaskBase, ITask { public DataTaskBase(CancellationToken token) : base(token) @@ -783,7 +783,7 @@ protected void RaiseOnData(TData data) } } - abstract class DataTaskBase : TaskBase, ITask + public abstract class DataTaskBase : TaskBase, ITask { public DataTaskBase(CancellationToken token) : base(token) diff --git a/src/GitHub.Api/Tasks/TaskCanceledExceptions.cs b/src/GitHub.Api/Tasks/TaskCanceledExceptions.cs index 92234f0da..8ffb27fa8 100644 --- a/src/GitHub.Api/Tasks/TaskCanceledExceptions.cs +++ b/src/GitHub.Api/Tasks/TaskCanceledExceptions.cs @@ -5,7 +5,7 @@ namespace GitHub.Unity { [Serializable] - class DependentTaskFailedException : TaskCanceledException + public class DependentTaskFailedException : TaskCanceledException { protected DependentTaskFailedException() : base() { } @@ -21,7 +21,7 @@ public DependentTaskFailedException(ITask task, Exception ex) : this(ex.InnerExc } [Serializable] - class ProcessException : TaskCanceledException + public class ProcessException : TaskCanceledException { public int ErrorCode { get; } @@ -45,4 +45,4 @@ protected ProcessException(SerializationInfo info, StreamingContext context) : b public ProcessException(ITask process) : this(process.Errors) { } } -} \ No newline at end of file +} diff --git a/src/GitHub.Api/Tasks/TaskExtensions.cs b/src/GitHub.Api/Tasks/TaskExtensions.cs index 57e17170d..fa634aaa9 100644 --- a/src/GitHub.Api/Tasks/TaskExtensions.cs +++ b/src/GitHub.Api/Tasks/TaskExtensions.cs @@ -4,7 +4,7 @@ namespace GitHub.Unity { - static class TaskExtensions + public static class TaskExtensions { public static async Task StartAwait(this ITask source, Action handler = null) { diff --git a/src/GitHub.Api/Tasks/TaskManager.cs 
b/src/GitHub.Api/Tasks/TaskManager.cs index 5bff94012..7a3e80749 100644 --- a/src/GitHub.Api/Tasks/TaskManager.cs +++ b/src/GitHub.Api/Tasks/TaskManager.cs @@ -5,7 +5,7 @@ namespace GitHub.Unity { - class TaskManager : ITaskManager + public class TaskManager : ITaskManager { private static readonly ILogging logger = LogHelper.GetLogger(); @@ -17,7 +17,19 @@ class TaskManager : ITaskManager public CancellationToken Token { get { return cts.Token; } } private static ITaskManager instance; - public static ITaskManager Instance => instance; + public static ITaskManager Instance + { + get + { + if (instance == null) + { + instance = new TaskManager(); + } + + return instance; + } + } + private ProgressReporter progressReporter = new ProgressReporter(); public event Action OnProgress diff --git a/src/GitHub.Api/UI/TreeBase.cs b/src/GitHub.Api/UI/TreeBase.cs index d3cfb88d1..57c299ed5 100644 --- a/src/GitHub.Api/UI/TreeBase.cs +++ b/src/GitHub.Api/UI/TreeBase.cs @@ -54,7 +54,7 @@ public void Load(IEnumerable treeDatas) TNode lastAddedNode = null; Clear(); - AddNode(Title, Title, -1 + displayRootLevel, true, false, false, false, isSelected, false, null, false); + AddNode(Title, Title, -1 + displayRootLevel, true, false, false, false, isSelected, false, null); foreach (var treeData in treeDatas) { @@ -118,13 +118,12 @@ public void Load(IEnumerable treeDatas) { isActive = treeData.IsActive; treeNodeTreeData = treeData; - isChecked = isCheckable && checkedFiles.Contains(nodePath); + isChecked = isCheckable && (checkedFiles.Contains(nodePath) || treeData.IsChecked); } isSelected = selectedNodePath != null && nodePath == selectedNodePath; - lastAddedNode = AddNode(nodePath, label, level + displayRootLevel + (parentIsPromoted ? 1 : 0), isFolder, isActive, nodeIsHidden, - nodeIsCollapsed, isSelected, isChecked, treeNodeTreeData, false); + lastAddedNode = AddNode(nodePath, label, level + displayRootLevel + (parentIsPromoted ? 
1 : 0), isFolder, isActive, nodeIsHidden, nodeIsCollapsed, isSelected, isChecked, treeNodeTreeData); } } } @@ -184,29 +183,15 @@ protected bool PromoteNode(TNode previouslyAddedNode, string nextLabel) public void SetCheckStateOnAll(bool isChecked) { - var nodeCheckState = isChecked ? CheckState.Checked : CheckState.Empty; foreach (var node in Nodes) { - var wasChecked = node.CheckState == CheckState.Checked; - node.CheckState = nodeCheckState; - - if (!node.IsFolder) - { - if (isChecked && !wasChecked) - { - AddCheckedNode(node); - } - else if (!isChecked && wasChecked) - { - RemoveCheckedNode(node); - } - } + SetCheckStateOnNode(node, isChecked); } } - protected TNode AddNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isSelected, bool isChecked, TData? treeData, bool isContainer) + protected TNode AddNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isSelected, bool isChecked, TData? 
treeData) { - var node = CreateTreeNode(path, label, level, isFolder, isActive, isHidden, isCollapsed, isChecked, treeData, isContainer); + var node = CreateTreeNode(path, label, level, isFolder, isActive, isHidden, isCollapsed, isChecked, treeData); SetNodeIcon(node); Nodes.Add(node); @@ -251,39 +236,32 @@ protected void ToggleNodeVisibility(int idx, TNode node) protected void ToggleNodeChecked(int idx, TNode node) { + CheckState checkState; var isChecked = false; - switch (node.CheckState) { case CheckState.Mixed: case CheckState.Empty: - node.CheckState = CheckState.Checked; + checkState = CheckState.Checked; isChecked = true; break; case CheckState.Checked: - node.CheckState = CheckState.Empty; + checkState = CheckState.Empty; break; - } - if (!node.IsFolder) - { - if (isChecked) - { - AddCheckedNode(node); - } - else - { - RemoveCheckedNode(node); - } + default: + throw new ArgumentOutOfRangeException("Unknown CheckState"); } + SetCheckStateOnNode(node, checkState); + if (node.IsFolderOrContainer) { ToggleChildrenChecked(idx, node, isChecked); } - ToggleParentFoldersChecked(idx, node, isChecked); + ToggleParentFolderAndContainersChecked(idx, node, checkState); } private void ToggleChildrenChecked(int idx, TNode node, bool isChecked) @@ -291,20 +269,8 @@ private void ToggleChildrenChecked(int idx, TNode node, bool isChecked) for (var i = idx + 1; i < Nodes.Count && node.Level < Nodes[i].Level; i++) { var childNode = Nodes[i]; - var wasChecked = childNode.CheckState == CheckState.Checked; - childNode.CheckState = isChecked ? 
CheckState.Checked : CheckState.Empty; - if (!childNode.IsFolder) - { - if (isChecked && !wasChecked) - { - AddCheckedNode(childNode); - } - else if (!isChecked && wasChecked) - { - RemoveCheckedNode(childNode); - } - } + SetCheckStateOnNode(childNode, isChecked); if (childNode.IsFolderOrContainer) { @@ -333,9 +299,36 @@ private List GetLeafNodes(TNode node, int idx) return results; } + private void SetCheckStateOnNode(TNode node, bool setChecked) + { + SetCheckStateOnNode(node, setChecked ? CheckState.Checked : CheckState.Empty); + } - private void ToggleParentFoldersChecked(int idx, TNode node, bool isChecked) + private void SetCheckStateOnNode(TNode node, CheckState setCheckState) { + var isChecked = setCheckState == CheckState.Checked + || setCheckState == CheckState.Mixed; + + var wasChecked = node.CheckState == CheckState.Checked; + + node.CheckState = setCheckState; + + if (!node.IsFolder) + { + if (isChecked && !wasChecked) + { + AddCheckedNode(node); + } + else if (!isChecked && wasChecked) + { + RemoveCheckedNode(node); + } + } + } + + private void ToggleParentFolderAndContainersChecked(int idx, TNode node, CheckState checkState) + { + var isChecked = checkState != CheckState.Empty; while (true) { if (node.Level > 0) @@ -385,14 +378,13 @@ private void ToggleParentFoldersChecked(int idx, TNode node, bool isChecked) var parentIndex = firstSiblingIndex - 1; var parentNode = Nodes[parentIndex]; - if (siblingsInSameState) - { - parentNode.CheckState = isChecked ? CheckState.Checked : CheckState.Empty; - } - else - { - parentNode.CheckState = CheckState.Mixed; - } + + var parentNodeState = + siblingsInSameState + ? 
node.CheckState + : CheckState.Mixed; + + SetCheckStateOnNode(parentNode, parentNodeState); idx = parentIndex; node = parentNode; @@ -407,7 +399,8 @@ private void ToggleParentFoldersChecked(int idx, TNode node, bool isChecked) protected abstract IEnumerable GetCollapsedFolders(); protected abstract void RemoveCheckedNode(TNode node); protected abstract void AddCheckedNode(TNode node); - protected abstract TNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, TData? treeData, bool isContainer); + protected abstract TNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, TData? treeData); + protected abstract void SetNodeIcon(TNode node); public string SelectedNodePath => SelectedNode?.Path; diff --git a/src/GitHub.Logging/Extensions/ExceptionExtensions.cs b/src/GitHub.Logging/Extensions/ExceptionExtensions.cs index b22772799..9c4c0f71c 100644 --- a/src/GitHub.Logging/Extensions/ExceptionExtensions.cs +++ b/src/GitHub.Logging/Extensions/ExceptionExtensions.cs @@ -3,7 +3,7 @@ namespace GitHub.Logging { - static class ExceptionExtensions + public static class ExceptionExtensions { public static string GetExceptionMessage(this Exception ex) { diff --git a/src/GitHub.Logging/GitHub.Logging.v3.ncrunchproject b/src/GitHub.Logging/GitHub.Logging.v3.ncrunchproject index 6d9cc8a63..9e3e80dd6 100644 --- a/src/GitHub.Logging/GitHub.Logging.v3.ncrunchproject +++ b/src/GitHub.Logging/GitHub.Logging.v3.ncrunchproject @@ -1,5 +1,11 @@  + + ..\..\script\lib\UnityExtensions\Unity\TestRunner\UnityEngine.TestRunner.dll + ..\..\script\lib\UnityExtensions\Unity\TestRunner\Editor\UnityEditor.TestRunner.dll + ..\..\script\lib\Managed\UnityEditor.dll + ..\..\script\lib\Managed\UnityEngine.dll + True diff --git a/src/UnityExtension/.gitignore b/src/UnityExtension/.gitignore index a2411d2f3..848aaec94 100644 --- 
a/src/UnityExtension/.gitignore +++ b/src/UnityExtension/.gitignore @@ -1,4 +1,7 @@ *.csproj UnityPackageManager JetBrains -UnityExtension.sln \ No newline at end of file +UnityExtension.sln +Assets/**/*.zip +Assets/**/*.md5 +Assets/**/*.json diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ApplicationCache.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/ApplicationCache.cs index 0a2160be2..272566cf7 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/ApplicationCache.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ApplicationCache.cs @@ -147,6 +147,7 @@ public IEnvironment Environment cacheContainer.SetCacheInitializer(CacheType.GitAheadBehind, () => GitAheadBehindCache.Instance); cacheContainer.SetCacheInitializer(CacheType.GitLocks, () => GitLocksCache.Instance); cacheContainer.SetCacheInitializer(CacheType.GitLog, () => GitLogCache.Instance); + cacheContainer.SetCacheInitializer(CacheType.GitFileLog, () => GitFileLogCache.Instance); cacheContainer.SetCacheInitializer(CacheType.GitStatus, () => GitStatusCache.Instance); cacheContainer.SetCacheInitializer(CacheType.GitUser, () => GitUserCache.Instance); cacheContainer.SetCacheInitializer(CacheType.RepositoryInfo, () => RepositoryInfoCache.Instance); @@ -213,12 +214,14 @@ public void InvalidateData() private void Invalidate() { - if (!isInvalidating) - { - isInvalidating = true; - LastUpdatedAt = DateTimeOffset.MinValue; - CacheInvalidated.SafeInvoke(CacheType); - } + isInvalidating = true; + LastUpdatedAt = DateTimeOffset.MinValue; + CacheInvalidated.SafeInvoke(CacheType); + } + + public void ResetInvalidation() + { + isInvalidating = false; } protected void SaveData(DateTimeOffset now, bool isChanged) @@ -463,25 +466,25 @@ public void UpdateData(IRepositoryInfoCacheData data) isUpdated = true; } - if (forcedInvalidation ||!Nullable.Equals(currentGitBranch, data.CurrentGitBranch)) + if (forcedInvalidation || !Nullable.Equals(currentGitBranch, data.CurrentGitBranch)) { currentGitBranch = 
data.CurrentGitBranch ?? GitBranch.Default; isUpdated = true; } - if (forcedInvalidation ||!Nullable.Equals(currentConfigRemote, data.CurrentConfigRemote)) + if (forcedInvalidation || !Nullable.Equals(currentConfigRemote, data.CurrentConfigRemote)) { currentConfigRemote = data.CurrentConfigRemote ?? ConfigRemote.Default; isUpdated = true; } - if (forcedInvalidation ||!Nullable.Equals(currentConfigBranch, data.CurrentConfigBranch)) + if (forcedInvalidation || !Nullable.Equals(currentConfigBranch, data.CurrentConfigBranch)) { currentConfigBranch = data.CurrentConfigBranch ?? ConfigBranch.Default; isUpdated = true; } - if (forcedInvalidation ||!String.Equals(currentHead, data.CurrentHead)) + if (forcedInvalidation || !String.Equals(currentHead, data.CurrentHead)) { currentHead = data.CurrentHead; isUpdated = true; @@ -620,6 +623,46 @@ public List Log public override TimeSpan DataTimeout { get { return TimeSpan.FromMinutes(1); } } } + [Location("cache/gitfilelog.yaml", LocationAttribute.Location.LibraryFolder)] + sealed class GitFileLogCache : ManagedCacheBase, IGitFileLogCache + { + [SerializeField] private GitFileLog fileLog = GitFileLog.Default; + + public GitFileLogCache() : base(CacheType.GitFileLog) + { } + + public GitFileLog FileLog + { + get + { + ValidateData(); + return fileLog; + } + set + { + var now = DateTimeOffset.Now; + var isUpdated = false; + + var shouldUpdate = forcedInvalidation; + + if (!shouldUpdate) + { + shouldUpdate = true; + } + + if (shouldUpdate) + { + fileLog = value; + isUpdated = true; + } + + SaveData(now, isUpdated); + } + } + + public override TimeSpan DataTimeout { get { return TimeSpan.FromMinutes(1); } } + } + [Location("cache/gittrackingstatus.yaml", LocationAttribute.Location.LibraryFolder)] sealed class GitAheadBehindCache : ManagedCacheBase, IGitAheadBehindCache { diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/EntryPoint.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/EntryPoint.cs index 3d99d48f9..b1d83ca2c 
100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/EntryPoint.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/EntryPoint.cs @@ -1,9 +1,6 @@ using GitHub.Logging; using System; using System.IO; -using System.Net; -using System.Net.Security; -using System.Security.Cryptography.X509Certificates; using UnityEditor; using UnityEngine; diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.asmdef b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.asmdef new file mode 100644 index 000000000..4408c5c12 --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.asmdef @@ -0,0 +1,15 @@ +{ + "name": "ExtensionLoader", + "references": [], + "optionalUnityReferences": [], + "includePlatforms": [ + "Editor" + ], + "excludePlatforms": [], + "allowUnsafeCode": false, + "overrideReferences": true, + "precompiledReferences": [ + "GitHub.UnityShim.dll" + ], + "autoReferenced": true +} \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.cs new file mode 100644 index 000000000..f3a7e9eef --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.cs @@ -0,0 +1,93 @@ +using UnityEditor; +using UnityEngine; +using System.IO; +using System; + +namespace GitHub.Unity +{ + [InitializeOnLoad] + public class ExtensionLoader : ScriptableSingleton + { + [SerializeField] private bool initialized = true; + + public bool Initialized + { + get + { + return initialized; + } + set + { + initialized = value; + Save(true); + } + } + + private static bool inSourceMode = false; + private const string sourceModePath = "Assets/Editor/build/"; + private const string realPath = "Assets/Plugins/GitHub/Editor/"; + + private static string[] assemblies20 = { "System.Threading.dll", "AsyncBridge.Net35.dll", 
"ReadOnlyCollectionsInterfaces.dll", "GitHub.Api.dll", "GitHub.Unity.dll" }; + private static string[] assemblies45 = { "GitHub.Api.45.dll", "GitHub.Unity.45.dll" }; + + private const string GITHUB_UNITY_DISABLE = "GITHUB_UNITY_DISABLE"; + private static bool IsDisabled { get { return Environment.GetEnvironmentVariable(GITHUB_UNITY_DISABLE) == "1"; } } + + static ExtensionLoader() + { + if (IsDisabled) + { + return; + } + EditorApplication.update += Initialize; + } + + private static void Initialize() + { + EditorApplication.update -= Initialize; + + // we're always doing this right now because if the plugin gets updated all the meta files will be disabled and we need to re-enable them + // we should probably detect if our assets change and re-run this instead of doing it every time + //if (!ExtensionLoader.instance.Initialized) + { + var scriptPath = Path.Combine(Application.dataPath, "Editor" + Path.DirectorySeparatorChar + "GitHub.Unity" + Path.DirectorySeparatorChar + "EntryPoint.cs"); + inSourceMode = File.Exists(scriptPath); + ToggleAssemblies(); + //ExtensionLoader.instance.Initialized = true; + AssetDatabase.SaveAssets(); + } + + } + + private static void ToggleAssemblies() + { + var path = inSourceMode ? sourceModePath : realPath; +#if NET_4_6 + ToggleAssemblies(path, assemblies20, false); + ToggleAssemblies(path, assemblies45, true); +#else + ToggleAssemblies(path, assemblies45, false); + ToggleAssemblies(path, assemblies20, true); +#endif + } + + private static void ToggleAssemblies(string path, string[] assemblies, bool enable) + { + foreach (var file in assemblies) + { + var filepath = path + file; + PluginImporter importer = AssetImporter.GetAtPath(filepath) as PluginImporter; + if (importer == null) + { + Debug.LogFormat("GitHub for Unity: Could not find importer for {0}. 
Some functionality may fail.", filepath); + continue; + } + if (importer.GetCompatibleWithEditor() != enable) + { + importer.SetCompatibleWithEditor(enable); + importer.SaveAndReimport(); + } + } + } + } +} diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.csproj b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.csproj new file mode 100644 index 000000000..29dd7e770 --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/ExtensionLoader.csproj @@ -0,0 +1,82 @@ + + + + + Debug + AnyCPU + {6B0EAB30-511A-44C1-87FE-D9AB7E34D115} + Library + Properties + GitHub.Unity + ExtensionLoader + v3.5 + 512 + ..\..\..\..\obj\ + ..\..\..\..\obj\ + + + + true + full + false + DEBUG;TRACE;$(BuildDefs) + prompt + 4 + 4 + false + false + true + + + pdbonly + true + TRACE;$(BuildDefs) + prompt + 4 + 4 + Release + false + false + true + + + true + full + false + TRACE;DEBUG;DEVELOPER_BUILD;$(BuildDefs) + prompt + 4 + 4 + false + false + true + + + + + + {F94F8AE1-C171-4A83-89E8-6557CA91A188} + UnityShim + + + $(UnityDir)Managed\UnityEditor.dll + False + + + $(UnityDir)Managed\UnityEngine.dll + False + + + + + + + + + \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/UnityAPIWrapper.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/UnityAPIWrapper.cs new file mode 100644 index 000000000..d034a91e3 --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ExtensionLoader/UnityAPIWrapper.cs @@ -0,0 +1,20 @@ +using UnityEditor; +using UnityEngine; +using System.IO; +using System; + +namespace GitHub.Unity +{ + [InitializeOnLoad] + public class UnityAPIWrapper : ScriptableSingleton + { + static UnityAPIWrapper() + { +#if UNITY_2018_2_OR_NEWER + Editor.finishedDefaultHeaderGUI += editor => { + UnityShim.Raise_Editor_finishedDefaultHeaderGUI(editor); + }; +#endif + } + } +} \ No newline at end of file diff --git 
a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.csproj b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.csproj new file mode 100644 index 000000000..5945f94ea --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.csproj @@ -0,0 +1,233 @@ + + + + + Debug + AnyCPU + {ADD7A18B-DD2A-4C22-A2C1-488964EFF30B} + Library + Properties + GitHub.Unity + GitHub.Unity.45 + v4.5 + 512 + $(SolutionDir)\unity\TestProject\Assets\Plugins\GitHub\Editor + ..\..\..\obj\ + + + + true + full + false + DEBUG;TRACE;$(BuildDefs);NET_4_6 + prompt + 4 + 4 + false + false + true + + + pdbonly + true + TRACE;$(BuildDefs);NET_4_6 + prompt + 4 + 4 + Release + false + false + true + + + true + full + false + TRACE;DEBUG;DEVELOPER_BUILD;$(BuildDefs);NET_4_6 + prompt + 4 + 4 + false + false + true + + + + + + + + + $(UnityDir)Managed\UnityEditor.dll + False + + + $(UnityDir)Managed\UnityEngine.dll + False + + + + + {b389adaf-62cc-486e-85b4-2d8b078df76B} + GitHub.Api.45 + + + {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} + GitHub.Logging + + + {F94F8AE1-C171-4A83-89E8-6557CA91A188} + UnityShim + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + eula.txt + PreserveNewest + + + credits.txt + PreserveNewest + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.v3.ncrunchproject b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.v3.ncrunchproject new file mode 100644 index 000000000..319cd523c --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.45.v3.ncrunchproject @@ -0,0 +1,5 @@ + + + True + + \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.asmdef 
b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.asmdef index 7e63aedb3..47e8a65d3 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.asmdef +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.asmdef @@ -1,8 +1,11 @@ { "name": "GitHub.Unity", - "references": [], + "references": [ + ], + "optionalUnityReferences": [], "includePlatforms": [ "Editor" ], - "excludePlatforms": [] + "excludePlatforms": [], + "allowUnsafeCode": false } \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.csproj b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.csproj index 95b438d79..1d1eceee7 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.csproj +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/GitHub.Unity.csproj @@ -77,6 +77,9 @@ + + + @@ -104,9 +107,6 @@ - - - {b389adaf-62cc-486e-85b4-2d8b078df763} @@ -116,6 +116,10 @@ {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} GitHub.Logging + + {F94F8AE1-C171-4A83-89E8-6557CA91A188} + UnityShim + @@ -167,8 +171,8 @@ - - + + diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/moved.png b/src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/renamed.png similarity index 100% rename from src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/moved.png rename to src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/renamed.png diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/moved@2x.png b/src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/renamed@2x.png similarity index 100% rename from src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/moved@2x.png rename to src/UnityExtension/Assets/Editor/GitHub.Unity/IconsAndLogos/renamed@2x.png diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Styles.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Styles.cs index 8896c1068..2fbeaf7c3 100644 --- 
a/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Styles.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Styles.cs @@ -65,6 +65,7 @@ class Styles headerBranchLabelStyle, headerUrlLabelStyle, headerRepoLabelStyle, + fileHistoryLogTitleStyle, headerTitleStyle, headerDescriptionStyle, toolbarButtonStyle, @@ -96,33 +97,6 @@ class Styles locksViewLockedByStyle, locksViewLockedBySelectedStyle; - private static Texture2D branchIcon, - activeBranchIcon, - trackingBranchIcon, - favoriteIconOn, - favoriteIconOff, - smallLogoIcon, - bigLogoIcon, - folderIcon, - mergeIcon, - dotIcon, - localCommitIcon, - repoIcon, - lockIcon, - emptyStateInit, - dropdownListIcon, - globeIcon, - spinnerInside, - spinnerOutside, - code, - rocket, - merge, - spinnerInsideInverted, - spinnerOutsideInverted, - codeInverted, - rocketInverted, - mergeInverted; - public static Texture2D GetFileStatusIcon(GitFileStatus status, bool isLocked) { if (isLocked) @@ -494,20 +468,19 @@ public static GUIStyle ToolbarButtonStyle toolbarButtonStyle = new GUIStyle(EditorStyles.toolbarButton); toolbarButtonStyle.name = "HistoryToolbarButtonStyle"; toolbarButtonStyle.richText = true; - toolbarButtonStyle.wordWrap = true; } return toolbarButtonStyle; } } - public static GUIStyle HistoryLockStyle + public static GUIStyle LockButtonStyle { get { if (historyLockStyle == null) { historyLockStyle = new GUIStyle(GUI.skin.FindStyle("IN LockButton")); - historyLockStyle.name = "HistoryLockStyle"; + historyLockStyle.name = "LockStyle"; } historyLockStyle.margin = new RectOffset(3, 3, 2, 2); return historyLockStyle; @@ -762,8 +735,7 @@ public static GUIStyle BoldCenteredLabel return boldCenteredLabel; } } - - + public static GUIStyle CommitDescriptionFieldStyle { get @@ -830,15 +802,25 @@ public static GUIStyle HyperlinkStyle } } - public static Texture2D ActiveBranchIcon + public static GUIStyle FileHistoryLogTitleStyle { get { - if (activeBranchIcon == null) + if (fileHistoryLogTitleStyle == null) { - 
activeBranchIcon = Utility.GetIcon("current-branch-indicator.png", "current-branch-indicator@2x.png"); + fileHistoryLogTitleStyle = new GUIStyle(EditorStyles.largeLabel); + fileHistoryLogTitleStyle.name = "FileHistoryLogTitleStyle"; + fileHistoryLogTitleStyle.margin = new RectOffset(0, 0, 0, 0); } - return activeBranchIcon; + return fileHistoryLogTitleStyle; + } + } + + public static Texture2D ActiveBranchIcon + { + get + { + return Utility.GetIcon("current-branch-indicator.png", "current-branch-indicator@2x.png", Utility.IsDarkTheme); } } @@ -846,11 +828,7 @@ public static Texture2D BranchIcon { get { - if (branchIcon == null) - { - branchIcon = Utility.GetIcon("branch.png", "branch@2x.png"); - } - return branchIcon; + return Utility.GetIcon("branch.png", "branch@2x.png"); } } @@ -858,12 +836,7 @@ public static Texture2D TrackingBranchIcon { get { - if (trackingBranchIcon == null) - { - trackingBranchIcon = Utility.GetIcon("tracked-branch-indicator.png"); - } - - return trackingBranchIcon; + return Utility.GetIcon("tracked-branch-indicator.png"); } } @@ -871,12 +844,7 @@ public static Texture2D FavoriteIconOn { get { - if (favoriteIconOn == null) - { - favoriteIconOn = Utility.GetIcon("favorite-branch-indicator.png"); - } - - return favoriteIconOn; + return Utility.GetIcon("favorite-branch-indicator.png"); } } @@ -884,12 +852,7 @@ public static Texture2D FavoriteIconOff { get { - if (favoriteIconOff == null) - { - favoriteIconOff = FolderIcon; - } - - return favoriteIconOff; + return FolderIcon; } } @@ -897,12 +860,7 @@ public static Texture2D SmallLogo { get { - if (smallLogoIcon == null) - { - smallLogoIcon = Utility.GetIcon("small-logo.png"); - } - - return smallLogoIcon; + return Utility.IsDarkTheme ? 
Utility.GetIcon("small-logo-light.png", "small-logo-light@2x.png") : Utility.GetIcon("small-logo.png", "small-logo@2x.png"); } } @@ -910,16 +868,7 @@ public static Texture2D BigLogo { get { - if (bigLogoIcon == null) - { - var defaultTextColor = Label.normal.textColor; - if (defaultTextColor.r > 0.5f && defaultTextColor.g > 0.5f && defaultTextColor.b > 0.5f) - bigLogoIcon = Utility.GetIcon("big-logo-light.png"); - else - bigLogoIcon = Utility.GetIcon("big-logo.png"); - } - - return bigLogoIcon; + return Utility.IsDarkTheme ? Utility.GetIcon("big-logo-light.png", "big-logo-light@2x.png") : Utility.GetIcon("big-logo.png", "big-logo@2x.png"); } } @@ -927,12 +876,7 @@ public static Texture2D MergeIcon { get { - if (mergeIcon == null) - { - mergeIcon = Utility.GetIcon("git-merge.png", "git-merge@2x.png"); - } - - return mergeIcon; + return Utility.GetIcon("git-merge.png", "git-merge@2x.png"); } } @@ -940,12 +884,7 @@ public static Texture2D DotIcon { get { - if (dotIcon == null) - { - dotIcon = Utility.GetIcon("dot.png", "dot@2x.png"); - } - - return dotIcon; + return Utility.GetIcon("dot.png", "dot@2x.png", Utility.IsDarkTheme); } } @@ -953,12 +892,7 @@ public static Texture2D LocalCommitIcon { get { - if (localCommitIcon == null) - { - localCommitIcon = Utility.GetIcon("local-commit-icon.png", "local-commit-icon@2x.png"); - } - - return localCommitIcon; + return Utility.GetIcon("local-commit-icon.png", "local-commit-icon@2x.png", Utility.IsDarkTheme); } } @@ -966,12 +900,7 @@ public static Texture2D FolderIcon { get { - if (folderIcon == null) - { - folderIcon = EditorGUIUtility.FindTexture("Folder Icon"); - } - - return folderIcon; + return EditorGUIUtility.FindTexture("Folder Icon"); } } @@ -979,11 +908,7 @@ public static Texture2D RepoIcon { get { - if (repoIcon == null) - { - repoIcon = Utility.GetIcon("repo.png", "repo@2x.png"); - } - return repoIcon; + return Utility.GetIcon("repo.png", "repo@2x.png", Utility.IsDarkTheme); } } @@ -991,36 +916,23 @@ public static 
Texture2D LockIcon { get { - if (lockIcon == null) - { - lockIcon = Utility.GetIcon("lock.png", "lock@2x.png"); - } - return lockIcon; + return Utility.GetIcon("lock.png", "lock@2x.png"); } } public static Texture2D EmptyStateInit { - get - { - if (emptyStateInit == null) + get { - emptyStateInit = Utility.GetIcon("empty-state-init.png", "empty-state-init@2x.png"); + return Utility.GetIcon("empty-state-init.png", "empty-state-init@2x.png"); } - return emptyStateInit; - } - } public static Texture2D DropdownListIcon { get { - if (dropdownListIcon == null) - { - dropdownListIcon = Utility.GetIcon("dropdown-list-icon.png", "dropdown-list-icon@2x.png"); - } - return dropdownListIcon; + return Utility.GetIcon("dropdown-list-icon.png", "dropdown-list-icon@2x.png"); } } @@ -1028,11 +940,7 @@ public static Texture2D GlobeIcon { get { - if (globeIcon == null) - { - globeIcon = Utility.GetIcon("globe.png", "globe@2x.png"); - } - return globeIcon; + return Utility.GetIcon("globe.png", "globe@2x.png", Utility.IsDarkTheme); } } @@ -1040,11 +948,7 @@ public static Texture2D SpinnerInside { get { - if (spinnerInside == null) - { - spinnerInside = Utility.GetIcon("spinner-inside.png", "spinner-inside@2x.png"); - } - return spinnerInside; + return Utility.GetIcon("spinner-inside.png", "spinner-inside@2x.png"); } } @@ -1052,11 +956,7 @@ public static Texture2D SpinnerOutside { get { - if (spinnerOutside == null) - { - spinnerOutside = Utility.GetIcon("spinner-outside.png", "spinner-outside@2x.png"); - } - return spinnerOutside; + return Utility.GetIcon("spinner-outside.png", "spinner-outside@2x.png"); } } @@ -1064,11 +964,7 @@ public static Texture2D Code { get { - if (code == null) - { - code = Utility.GetIcon("code.png", "code@2x.png"); - } - return code; + return Utility.GetIcon("code.png", "code@2x.png"); } } @@ -1076,11 +972,7 @@ public static Texture2D Rocket { get { - if (rocket == null) - { - rocket = Utility.GetIcon("rocket.png", "rocket@2x.png"); - } - return rocket; + 
return Utility.GetIcon("rocket.png", "rocket@2x.png"); } } @@ -1088,11 +980,7 @@ public static Texture2D Merge { get { - if (merge == null) - { - merge = Utility.GetIcon("merge.png", "merge@2x.png"); - } - return merge; + return Utility.GetIcon("merge.png", "merge@2x.png"); } } @@ -1100,12 +988,7 @@ public static Texture2D SpinnerInsideInverted { get { - if (spinnerInsideInverted == null) - { - spinnerInsideInverted = Utility.GetIcon("spinner-inside.png", "spinner-inside@2x.png"); - spinnerInsideInverted.InvertColors(); - } - return spinnerInsideInverted; + return Utility.GetIcon("spinner-inside.png", "spinner-inside@2x.png", true); } } @@ -1113,12 +996,7 @@ public static Texture2D SpinnerOutsideInverted { get { - if (spinnerOutsideInverted == null) - { - spinnerOutsideInverted = Utility.GetIcon("spinner-outside.png", "spinner-outside@2x.png"); - spinnerOutsideInverted.InvertColors(); - } - return spinnerOutsideInverted; + return Utility.GetIcon("spinner-outside.png", "spinner-outside@2x.png", true); } } @@ -1126,12 +1004,7 @@ public static Texture2D CodeInverted { get { - if (codeInverted == null) - { - codeInverted = Utility.GetIcon("code.png", "code@2x.png"); - codeInverted.InvertColors(); - } - return codeInverted; + return Utility.GetIcon("code.png", "code@2x.png", true); } } @@ -1139,12 +1012,7 @@ public static Texture2D RocketInverted { get { - if (rocketInverted == null) - { - rocketInverted = Utility.GetIcon("rocket.png", "rocket@2x.png"); - rocketInverted.InvertColors(); - } - return rocketInverted; + return Utility.GetIcon("rocket.png", "rocket@2x.png", true); } } @@ -1152,12 +1020,7 @@ public static Texture2D MergeInverted { get { - if (mergeInverted == null) - { - mergeInverted = Utility.GetIcon("merge.png", "merge@2x.png"); - mergeInverted.InvertColors(); - } - return mergeInverted; + return Utility.GetIcon("merge.png", "merge@2x.png", true); } } private static GUIStyle foldout; diff --git 
a/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Utility.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Utility.cs index c11549630..c43c76893 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Utility.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/Misc/Utility.cs @@ -5,34 +5,53 @@ using System.Reflection; using UnityEditor; using UnityEngine; +using System.Collections.Generic; namespace GitHub.Unity { class Utility : ScriptableObject { - public static Texture2D GetIcon(string filename, string filename2x = "") + private static Dictionary iconCache = new Dictionary(); + + public static bool IsDarkTheme { + get { + var defaultTextColor = Styles.Label.normal.textColor; + return defaultTextColor.r > 0.5f && defaultTextColor.g > 0.5f && defaultTextColor.b > 0.5f; + } + } + + public static Texture2D GetIcon(string filename, string filename2x = "", bool invertColors = false) { if (EditorGUIUtility.pixelsPerPoint > 1f && !string.IsNullOrEmpty(filename2x)) { filename = filename2x; } - Texture2D texture2D = null; + var key = invertColors ? "dark_" + filename : "light_" + filename; - var stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("GitHub.Unity.IconsAndLogos." + filename); - if (stream != null) + if (iconCache.ContainsKey(key)) { - texture2D = stream.ToTexture2D(); + return iconCache[key]; } - else + + Texture2D texture2D = null; + + var stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("GitHub.Unity.IconsAndLogos." 
+ filename); + if (stream == null) { - var iconPath = "Assets/Editor/GitHub.Unity/IconsAndLogos/" + filename; - texture2D = AssetDatabase.LoadAssetAtPath(iconPath); + stream = new MemoryStream(Application.dataPath.ToNPath().Combine("Editor/GitHub.Unity/IconsAndLogos/", filename).ReadAllBytes()); } + texture2D = stream.ToTexture2D(); + stream.Dispose(); + if (texture2D != null) { texture2D.hideFlags = HideFlags.HideAndDontSave; + if (invertColors) { + texture2D.InvertColors(); + } + iconCache.Add(key, texture2D); } return texture2D; diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/Properties/AssemblyInfo.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/Properties/AssemblyInfo.cs index effeac2f0..a320c4438 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/Properties/AssemblyInfo.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/Properties/AssemblyInfo.cs @@ -2,6 +2,6 @@ using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -[assembly: AssemblyTitle("GitHub.Api")] -[assembly: AssemblyDescription("GitHub Api")] +[assembly: AssemblyTitle("GitHub.Unity")] +[assembly: AssemblyDescription("GitHub for Unity")] [assembly: Guid("add7a18b-dd2a-4c22-a2c1-488964eff30a")] diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/ScriptObjectSingleton.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/ScriptObjectSingleton.cs index a5628f284..42452077c 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/ScriptObjectSingleton.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/ScriptObjectSingleton.cs @@ -10,20 +10,34 @@ namespace GitHub.Unity sealed class LocationAttribute : Attribute { public enum Location { PreferencesFolder, ProjectFolder, LibraryFolder, UserFolder } - public string filepath { get; set; } + + private string relativePath; + private Location location; + + private string filePath; + public string FilePath { + get { + if (filePath != null) return filePath; + + if (relativePath[0] == '/') + relativePath = 
relativePath.Substring(1); + + if (location == Location.PreferencesFolder) + filePath = InternalEditorUtility.unityPreferencesFolder + "/" + relativePath; + else if (location == Location.UserFolder) + filePath = EntryPoint.ApplicationManager.Environment.UserCachePath.Combine(relativePath).ToString(SlashMode.Forward); + else if (location == Location.LibraryFolder) + filePath = EntryPoint.ApplicationManager.Environment.UnityProjectPath.Combine("Library", "gfu", relativePath); + + return filePath; + } + } + public LocationAttribute(string relativePath, Location location) { Guard.ArgumentNotNullOrWhiteSpace(relativePath, "relativePath"); - - if (relativePath[0] == '/') - relativePath = relativePath.Substring(1); - - if (location == Location.PreferencesFolder) - filepath = InternalEditorUtility.unityPreferencesFolder + "/" + relativePath; - else if (location == Location.UserFolder) - filepath = EntryPoint.ApplicationManager.Environment.UserCachePath.Combine(relativePath).ToString(SlashMode.Forward); - else if (location == Location.LibraryFolder) - filepath = EntryPoint.ApplicationManager.Environment.UnityProjectPath.Combine("Library", "gfu", relativePath); + this.relativePath = relativePath; + this.location = location; } } @@ -99,7 +113,7 @@ protected virtual void Save(bool saveAsText) if (attr == null) return null; - return attr.filepath.ToNPath(); + return attr.FilePath.ToNPath(); } } } \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/Services/AuthenticationService.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/Services/AuthenticationService.cs index bd215045e..b756bfa94 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/Services/AuthenticationService.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/Services/AuthenticationService.cs @@ -1,4 +1,7 @@ using System; +using System.Text; +using System.Threading; +using GitHub.Logging; namespace GitHub.Unity { @@ -8,14 +11,18 @@ class AuthenticationService private 
LoginResult loginResultData; - public AuthenticationService(IProcessManager processManager, ITaskManager taskManager, UriString host, IKeychain keychain, NPath nodeJsExecutablePath, NPath octorunExecutablePath) + public AuthenticationService(UriString host, IKeychain keychain, + IProcessManager processManager, ITaskManager taskManager, + IEnvironment environment + ) { - client = new ApiClient(host, keychain, processManager, taskManager, nodeJsExecutablePath, octorunExecutablePath); + client = new ApiClient(keychain, processManager, taskManager, environment, host); } + public HostAddress HostAddress { get { return client.HostAddress; } } + public void Login(string username, string password, Action twofaRequired, Action authResult) { - loginResultData = null; client.Login(username, password, r => { @@ -24,11 +31,53 @@ public void Login(string username, string password, Action twofaRequired }, authResult); } + public void LoginWithToken(string token, Action authResult) + { + client.LoginWithToken(token, authResult); + } + public void LoginWith2fa(string code) { if (loginResultData == null) throw new InvalidOperationException("Call Login() first"); client.ContinueLogin(loginResultData, code); } + + public void GetServerMeta(Action serverMeta, Action error) + { + loginResultData = null; + client.GetEnterpriseServerMeta(data => + { + serverMeta(data); + }, exception => { + error(exception.Message); + }); + } + + public Uri GetLoginUrl(string state) + { + var query = new StringBuilder(); + + query.Append("client_id="); + query.Append(Uri.EscapeDataString(ApplicationInfo.ClientId)); + query.Append("&redirect_uri="); + query.Append(Uri.EscapeDataString(OAuthCallbackManager.CallbackUrl.ToString())); + query.Append("&scope="); + query.Append(Uri.EscapeDataString("user,repo")); + query.Append("&state="); + query.Append(Uri.EscapeDataString(state)); + + var uri = new Uri(HostAddress.WebUri, "login/oauth/authorize"); + var uriBuilder = new UriBuilder(uri) + { + Query = 
query.ToString() + }; + return uriBuilder.Uri; + } + + public void LoginWithOAuthCode(string code, Action result) + { + client.CreateOAuthToken(code, result); + } } } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/AuthenticationView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/AuthenticationView.cs index b9430852c..dbd51c4c7 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/AuthenticationView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/AuthenticationView.cs @@ -1,4 +1,5 @@ using System; +using System.Linq; using System.Threading; using UnityEngine; using UnityEditor; @@ -10,263 +11,156 @@ class AuthenticationView : Subview { private static readonly Vector2 viewSize = new Vector2(290, 290); - private const string CredentialsNeedRefreshMessage = "We've detected that your stored credentials are out of sync with your current user. This can happen if you have signed in to git outside of Unity. Sign in again to refresh your credentials."; - private const string NeedAuthenticationMessage = "We need you to authenticate first"; private const string WindowTitle = "Authenticate"; - private const string UsernameLabel = "Username"; - private const string PasswordLabel = "Password"; - private const string TwofaLabel = "2FA Code"; - private const string LoginButton = "Sign in"; - private const string BackButton = "Back"; - private const string AuthTitle = "Sign in to GitHub"; - private const string TwofaTitle = "Two-Factor Authentication"; - private const string TwofaDescription = "Open the two-factor authentication app on your device to view your 2FA code and verify your identity."; - private const string TwofaButton = "Verify"; - [SerializeField] private Vector2 scroll; - [SerializeField] private string username = string.Empty; - [SerializeField] private string two2fa = string.Empty; - [SerializeField] private string message; - [SerializeField] private string errorMessage; - [SerializeField] private bool need2fa; - - 
[NonSerialized] private bool isBusy; - [NonSerialized] private bool enterPressed; - [NonSerialized] private string password = string.Empty; - [NonSerialized] private AuthenticationService authenticationService; + [SerializeField] private SubTab changeTab = SubTab.GitHub; + [SerializeField] private SubTab activeTab = SubTab.GitHub; + [SerializeField] private GitHubAuthenticationView gitHubAuthenticationView; + [SerializeField] private GitHubEnterpriseAuthenticationView gitHubEnterpriseAuthenticationView; + [SerializeField] private bool hasGitHubDotComConnection; + [SerializeField] private bool hasGitHubEnterpriseConnection; public override void InitializeView(IView parent) { base.InitializeView(parent); - need2fa = isBusy = false; - message = errorMessage = null; Title = WindowTitle; Size = viewSize; - } - public void Initialize(Exception exception) - { - var usernameMismatchException = exception as TokenUsernameMismatchException; - if (usernameMismatchException != null) - { - message = CredentialsNeedRefreshMessage; - username = usernameMismatchException.CachedUsername; - } + gitHubAuthenticationView = gitHubAuthenticationView ?? new GitHubAuthenticationView(); + gitHubEnterpriseAuthenticationView = gitHubEnterpriseAuthenticationView ?? 
new GitHubEnterpriseAuthenticationView(); - var keychainEmptyException = exception as KeychainEmptyException; - if (keychainEmptyException != null) + try { - message = NeedAuthenticationMessage; + OAuthCallbackManager.Start(); } - - if (usernameMismatchException == null && keychainEmptyException == null) + catch (Exception ex) { - message = exception.Message; + Logger.Trace(ex, "Error Starting OAuthCallbackManager"); } - } - public override void OnGUI() - { - HandleEnterPressed(); + gitHubAuthenticationView.InitializeView(this); + gitHubEnterpriseAuthenticationView.InitializeView(this); - EditorGUIUtility.labelWidth = 90f; + hasGitHubDotComConnection = Platform.Keychain.Connections.Any(HostAddress.IsGitHubDotCom); + hasGitHubEnterpriseConnection = Platform.Keychain.Connections.Any(connection => !HostAddress.IsGitHubDotCom(connection)); - scroll = GUILayout.BeginScrollView(scroll); + if (hasGitHubDotComConnection) { - GUILayout.BeginHorizontal(Styles.AuthHeaderBoxStyle); - { - GUILayout.Label(AuthTitle, Styles.HeaderRepoLabelStyle); - } - GUILayout.EndHorizontal(); - - GUILayout.BeginVertical(); - { - if (!need2fa) - { - OnGUILogin(); - } - else - { - OnGUI2FA(); - } - } - - GUILayout.EndVertical(); + changeTab = SubTab.GitHubEnterprise; + UpdateActiveTab(); } - GUILayout.EndScrollView(); } - - private void HandleEnterPressed() - { - if (Event.current.type != EventType.KeyDown) - return; - enterPressed = Event.current.keyCode == KeyCode.Return || Event.current.keyCode == KeyCode.KeypadEnter; - if (enterPressed) - Event.current.Use(); + public void Initialize(Exception exception) + { + } - private void OnGUILogin() + public override void OnGUI() { - EditorGUI.BeginDisabledGroup(isBusy); - { - ShowMessage(); - - EditorGUILayout.Space(); - - GUILayout.BeginHorizontal(); - { - username = EditorGUILayout.TextField(UsernameLabel ,username, Styles.TextFieldStyle); - } - GUILayout.EndHorizontal(); - - EditorGUILayout.Space(); - - GUILayout.BeginHorizontal(); - { - password 
= EditorGUILayout.PasswordField(PasswordLabel, password, Styles.TextFieldStyle); - } - GUILayout.EndHorizontal(); - - EditorGUILayout.Space(); - - ShowErrorMessage(); - - GUILayout.Space(Styles.BaseSpacing + 3); - GUILayout.BeginHorizontal(); - { - GUILayout.FlexibleSpace(); - if (GUILayout.Button(LoginButton) || (!isBusy && enterPressed)) - { - GUI.FocusControl(null); - isBusy = true; - AuthenticationService.Login(username, password, DoRequire2fa, DoResult); - } - } - GUILayout.EndHorizontal(); - } - EditorGUI.EndDisabledGroup(); + DoToolbarGUI(); + ActiveView.OnGUI(); } - - private void OnGUI2FA() + + public override bool IsBusy { - GUILayout.BeginVertical(); - { - GUILayout.Label(TwofaTitle, EditorStyles.boldLabel); - GUILayout.Label(TwofaDescription, EditorStyles.wordWrappedLabel); - - EditorGUI.BeginDisabledGroup(isBusy); - { - EditorGUILayout.Space(); - two2fa = EditorGUILayout.TextField(TwofaLabel, two2fa, Styles.TextFieldStyle); - EditorGUILayout.Space(); - ShowErrorMessage(); - - GUILayout.BeginHorizontal(); - { - GUILayout.FlexibleSpace(); - if (GUILayout.Button(BackButton)) - { - GUI.FocusControl(null); - Clear(); - } - - if (GUILayout.Button(TwofaButton) || (!isBusy && enterPressed)) - { - GUI.FocusControl(null); - isBusy = true; - AuthenticationService.LoginWith2fa(two2fa); - } - } - GUILayout.EndHorizontal(); - - EditorGUILayout.Space(); - } - EditorGUI.EndDisabledGroup(); - } - GUILayout.EndVertical(); + get { return (gitHubAuthenticationView != null && gitHubAuthenticationView.IsBusy) || (gitHubEnterpriseAuthenticationView != null && gitHubEnterpriseAuthenticationView.IsBusy); } } - private void DoRequire2fa(string msg) + public override void OnDataUpdate() { - need2fa = true; - errorMessage = msg; - isBusy = false; - Redraw(); + base.OnDataUpdate(); + MaybeUpdateData(); } - private void Clear() + public override void Finish(bool result) { - need2fa = false; - errorMessage = null; - isBusy = false; - Redraw(); + OAuthCallbackManager.Stop(); + 
base.Finish(result); } - private void DoResult(bool success, string msg) + private void MaybeUpdateData() { - isBusy = false; - if (success) - { - UsageTracker.IncrementAuthenticationViewButtonAuthentication(); - - Clear(); - Finish(true); - } - else - { - errorMessage = msg; - Redraw(); - } } - private void ShowMessage() + private static SubTab TabButton(SubTab tab, string title, SubTab currentTab) { - if (message != null) - { - EditorGUILayout.HelpBox(message, MessageType.Warning); - } + return GUILayout.Toggle(currentTab == tab, title, EditorStyles.toolbarButton) ? tab : currentTab; } - private void ShowErrorMessage() + private enum SubTab { - if (errorMessage != null) - { - EditorGUILayout.HelpBox(errorMessage, MessageType.Error); - } + None, + GitHub, + GitHubEnterprise } - private AuthenticationService AuthenticationService + private void DoToolbarGUI() { - get + GUILayout.BeginHorizontal(EditorStyles.toolbar); { - if (authenticationService == null) + EditorGUI.BeginChangeCheck(); { - UriString host; - if (Repository != null && Repository.CloneUrl != null && Repository.CloneUrl.IsValidUri) + EditorGUI.BeginDisabledGroup(hasGitHubDotComConnection || IsBusy); { - host = new UriString(Repository.CloneUrl.ToRepositoryUri() - .GetComponents(UriComponents.SchemeAndServer, UriFormat.SafeUnescaped)); + changeTab = TabButton(SubTab.GitHub, "GitHub", changeTab); } - else + EditorGUI.EndDisabledGroup(); + + EditorGUI.BeginDisabledGroup(hasGitHubEnterpriseConnection || IsBusy); { - host = UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri); + changeTab = TabButton(SubTab.GitHubEnterprise, "GitHub Enterprise", changeTab); } + EditorGUI.EndDisabledGroup(); + } - AuthenticationService = new AuthenticationService(Manager.ProcessManager, Manager.TaskManager, host, Platform.Keychain, Environment.NodeJsExecutablePath, Environment.OctorunScriptPath); + if (EditorGUI.EndChangeCheck()) + { + UpdateActiveTab(); } - return authenticationService; + + 
GUILayout.FlexibleSpace(); } - set + EditorGUILayout.EndHorizontal(); + } + + private void UpdateActiveTab() + { + if (changeTab != activeTab) { - authenticationService = value; + var fromView = ActiveView; + activeTab = changeTab; + var toView = ActiveView; + SwitchView(fromView, toView); } } + private void SwitchView(Subview fromView, Subview toView) + { + GUI.FocusControl(null); - public override bool IsBusy + if (fromView != null) + fromView.OnDisable(); + toView.OnEnable(); + + // this triggers a repaint + Parent.Redraw(); + } + + private Subview ActiveView { - get { return isBusy; } + get + { + switch (activeTab) + { + case SubTab.GitHub: + return gitHubAuthenticationView; + case SubTab.GitHubEnterprise: + return gitHubEnterpriseAuthenticationView; + default: + throw new NotImplementedException(); + } + } } } } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BaseWindow.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BaseWindow.cs index fa21bebf3..e0fe85c84 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BaseWindow.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BaseWindow.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { - abstract class BaseWindow : EditorWindow, IView + public abstract class BaseWindow : EditorWindow, IView { [NonSerialized] private bool initialized = false; [NonSerialized] private IUser cachedUser; @@ -181,4 +181,4 @@ protected ILogging Logger } } } -} \ No newline at end of file +} diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BranchesView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BranchesView.cs index aad002371..70d1d0c56 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BranchesView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/BranchesView.cs @@ -514,6 +514,7 @@ private void SwitchBranch(string branch) { UsageTracker.IncrementBranchesViewButtonCheckoutLocalBranch(); Redraw(); + AssetDatabase.Refresh(); } else { diff --git 
a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesTreeControl.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesTreeControl.cs index d7d9a03c4..9554e2b55 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesTreeControl.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesTreeControl.cs @@ -29,7 +29,7 @@ public GitStatusEntry GitStatusEntry } public string ProjectPath { get { return GitStatusEntry.projectPath; } } - public GitFileStatus GitFileStatus { get { return GitStatusEntry.status; } } + public GitFileStatus GitFileStatus { get { return GitStatusEntry.Status; } } } [Serializable] @@ -186,7 +186,7 @@ protected Texture GetNodeIconBadge(ChangesTreeNode node) return Styles.GetFileStatusIcon(gitFileStatus, node.IsLocked); } - protected override ChangesTreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, GitStatusEntryTreeData? treeData, bool isContainer) + protected override ChangesTreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, GitStatusEntryTreeData? 
treeData) { var gitStatusEntry = GitStatusEntry.Default; var isLocked = false; diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesView.cs index ec0de8717..0d6235b42 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ChangesView.cs @@ -244,6 +244,7 @@ private GenericMenu CreateContextMenu(ChangesTreeNode node) } Repository.DiscardChanges(discardEntries) + .ThenInUI(AssetDatabase.Refresh) .Start(); }); @@ -477,7 +478,7 @@ private void Commit() { isBusy = true; var files = treeChanges.GetCheckedFiles().ToList(); - ITask addTask; + ITask addTask = null; if (files.Count == gitStatusEntries.Count) { @@ -485,11 +486,18 @@ private void Commit() } else { - addTask = Repository.CommitFiles(files, commitMessage, commitBody); + ITask commit = Repository.CommitFiles(files, commitMessage, commitBody); + + // if there are files that have been staged outside of Unity, but they aren't selected for commit, remove them + // from the index before commiting, otherwise the commit will take them along. + var filesStagedButNotChecked = gitStatusEntries.Where(x => x.Staged).Select(x => x.Path).Except(files).ToList(); + if (filesStagedButNotChecked.Count > 0) + addTask = GitClient.Remove(filesStagedButNotChecked); + addTask = addTask == null ? 
commit : addTask.Then(commit); } addTask - .FinallyInUI((success, exception) => + .FinallyInUI((success, exception) => { if (success) { diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/FileHistoryWindow.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/FileHistoryWindow.cs new file mode 100644 index 000000000..43e32161c --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/FileHistoryWindow.cs @@ -0,0 +1,241 @@ +using System; +using System.Linq; +using UnityEditor; +using UnityEngine; + +namespace GitHub.Unity +{ + public class FileHistoryWindow : BaseWindow + { + [MenuItem("Assets/Git/History", false)] + private static void GitFileHistory() + { + if (Selection.assetGUIDs != null) + { + var assetPath = + AssetDatabase.GUIDToAssetPath(Selection.assetGUIDs.First()) + .ToNPath(); + + var windowType = typeof(Window); + var fileHistoryWindow = GetWindow(windowType); + fileHistoryWindow.InitializeWindow(EntryPoint.ApplicationManager); + fileHistoryWindow.SetSelectedPath(assetPath); + fileHistoryWindow.Show(); + } + } + + [MenuItem("Assets/Git/History", true)] + private static bool GitFileHistoryValidation() + { + return Selection.assetGUIDs != null && Selection.assetGUIDs.Length > 0; + } + + private const string Title = "File History"; + + [NonSerialized] private bool firstOnGUI = true; + [NonSerialized] private Texture selectedIcon; + + [SerializeField] private bool locked; + [SerializeField] private FileHistoryView fileHistoryView = new FileHistoryView(); + [SerializeField] private UnityEngine.Object selectedObject; + [SerializeField] private string selectedObjectAssetPath; + [SerializeField] private string selectedObjectRepositoryPath; + + public void SetSelectedPath(string assetPath) + { + selectedObject = null; + selectedObjectAssetPath = null; + selectedObjectRepositoryPath = null; + + if (selectedObjectAssetPath != NPath.Default) + { + selectedObjectAssetPath = assetPath; + selectedObject = 
AssetDatabase.LoadMainAssetAtPath(selectedObjectAssetPath); + + selectedObjectRepositoryPath = + Environment.GetRepositoryPath(assetPath.ToNPath()) + .ToString(SlashMode.Forward); + } + + LoadSelectedIcon(); + + Repository.UpdateFileLog(selectedObjectRepositoryPath) + .Start(); + } + + public override void Initialize(IApplicationManager applicationManager) + { + base.Initialize(applicationManager); + + fileHistoryView.InitializeView(this); + } + + public override bool IsBusy + { + get { return false; } + } + + public override void OnEnable() + { + base.OnEnable(); + + LoadSelectedIcon(); + + if (fileHistoryView != null) + fileHistoryView.OnEnable(); + } + + public override void OnDisable() + { + base.OnDisable(); + if (fileHistoryView != null) + fileHistoryView.OnDisable(); + } + + public override void OnDataUpdate() + { + base.OnDataUpdate(); + MaybeUpdateData(); + + if (fileHistoryView != null) + fileHistoryView.OnDataUpdate(); + } + + public override void OnRepositoryChanged(IRepository oldRepository) + { + base.OnRepositoryChanged(oldRepository); + + DetachHandlers(oldRepository); + AttachHandlers(Repository); + } + + public override void OnSelectionChange() + { + base.OnSelectionChange(); + if (fileHistoryView != null) + fileHistoryView.OnSelectionChange(); + + if (!locked) + { + selectedObject = Selection.activeObject; + + string assetPath = null; + if (selectedObject != null) + { + assetPath = AssetDatabase.GetAssetPath(selectedObject); + } + + SetSelectedPath(assetPath); + } + } + + public override void Refresh() + { + base.Refresh(); + if (fileHistoryView != null) + fileHistoryView.Refresh(); + Refresh(CacheType.GitFileLog); + Redraw(); + } + + public override void OnUI() + { + base.OnUI(); + + if (selectedObject != null) + { + GUILayout.BeginVertical(Styles.HeaderStyle); + { + DoHeaderGUI(); + + fileHistoryView.OnGUI(); + } + GUILayout.EndVertical(); + } + } + + private void MaybeUpdateData() + { + if (firstOnGUI) + { + titleContent = new 
GUIContent(Title, Styles.SmallLogo); + } + firstOnGUI = false; + } + + private void AttachHandlers(IRepository repository) + { + if (repository == null) + return; + } + + private void DetachHandlers(IRepository repository) + { + if (repository == null) + return; + } + + private void LoadSelectedIcon() + { + Texture nodeIcon = null; + + if (!string.IsNullOrEmpty(selectedObjectAssetPath)) + { + if (selectedObjectAssetPath.ToNPath().DirectoryExists()) + { + nodeIcon = Styles.FolderIcon; + } + else + { + nodeIcon = UnityEditorInternal.InternalEditorUtility.GetIconForFile(selectedObjectAssetPath); + } + + nodeIcon.hideFlags = HideFlags.HideAndDontSave; + } + + selectedIcon = nodeIcon; + } + + private void ShowButton(Rect rect) + { + EditorGUI.BeginChangeCheck(); + + locked = GUI.Toggle(rect, locked, GUIContent.none, Styles.LockButtonStyle); + + if (!EditorGUI.EndChangeCheck()) + return; + + this.OnSelectionChange(); + } + + private void DoHeaderGUI() + { + GUILayout.BeginHorizontal(Styles.HeaderBoxStyle); + { + var iconWidth = 32; + var iconHeight = 32; + + GUILayout.Label(selectedIcon, GUILayout.Height(iconWidth), GUILayout.Width(iconHeight)); + GUILayout.Space(16); + + GUILayout.BeginVertical(); + { + GUILayout.Label(selectedObjectAssetPath, Styles.FileHistoryLogTitleStyle); + + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + + if (GUILayout.Button("Show in Project")) + { + EditorGUIUtility.PingObject(selectedObject); + } + } + GUILayout.EndHorizontal(); + } + GUILayout.EndVertical(); + } + GUILayout.EndHorizontal(); + } + } +} diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubAuthenticationView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubAuthenticationView.cs new file mode 100644 index 000000000..921d190b0 --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubAuthenticationView.cs @@ -0,0 +1,310 @@ +using System; +using System.Threading; +using UnityEngine; +using UnityEditor; + +namespace 
GitHub.Unity +{ + [Serializable] + class GitHubAuthenticationView : Subview + { + private static readonly Vector2 viewSize = new Vector2(290, 290); + + private const string CredentialsNeedRefreshMessage = "We've detected that your stored credentials are out of sync with your current user. This can happen if you have signed in to git outside of Unity. Sign in again to refresh your credentials."; + private const string NeedAuthenticationMessage = "We need you to authenticate first"; + private const string WindowTitle = "Authenticate"; + private const string UsernameLabel = "Username"; + private const string PasswordLabel = "Password"; + private const string TwofaLabel = "2FA Code"; + private const string LoginButton = "Sign in"; + private const string BackButton = "Back"; + private const string AuthTitle = "Sign in to GitHub"; + private const string TwofaTitle = "Two-Factor Authentication"; + private const string TwofaDescription = "Open the two-factor authentication app on your device to view your 2FA code and verify your identity."; + private const string TwofaButton = "Verify"; + + [SerializeField] private Vector2 scroll; + [SerializeField] private string username = string.Empty; + [SerializeField] private string two2fa = string.Empty; + [SerializeField] private string message; + [SerializeField] private string errorMessage; + [SerializeField] private bool need2fa; + + [NonSerialized] private bool isBusy; + [NonSerialized] private bool enterPressed; + [NonSerialized] private string password = string.Empty; + [NonSerialized] private AuthenticationService authenticationService; + [NonSerialized] private string oAuthState; + [NonSerialized] private string oAuthOpenUrl; + + public override void InitializeView(IView parent) + { + Logger.Trace("InitializeView"); + + base.InitializeView(parent); + need2fa = isBusy = false; + message = errorMessage = null; + Title = WindowTitle; + Size = viewSize; + + oAuthState = Guid.NewGuid().ToString(); + oAuthOpenUrl = 
AuthenticationService.GetLoginUrl(oAuthState).ToString(); + + OAuthCallbackManager.OnCallback += OnOAuthCallback; + } + + public void Initialize(Exception exception) + { + var usernameMismatchException = exception as TokenUsernameMismatchException; + if (usernameMismatchException != null) + { + message = CredentialsNeedRefreshMessage; + username = usernameMismatchException.CachedUsername; + } + + var keychainEmptyException = exception as KeychainEmptyException; + if (keychainEmptyException != null) + { + message = NeedAuthenticationMessage; + } + + if (usernameMismatchException == null && keychainEmptyException == null) + { + message = exception.Message; + } + } + + public override void OnGUI() + { + HandleEnterPressed(); + + EditorGUIUtility.labelWidth = 90f; + + scroll = GUILayout.BeginScrollView(scroll); + { + GUILayout.BeginHorizontal(Styles.AuthHeaderBoxStyle); + { + GUILayout.Label(AuthTitle, Styles.HeaderRepoLabelStyle); + } + GUILayout.EndHorizontal(); + + GUILayout.BeginVertical(); + { + if (!need2fa) + { + OnGUILogin(); + } + else + { + OnGUI2FA(); + } + } + + GUILayout.EndVertical(); + } + GUILayout.EndScrollView(); + } + + private void HandleEnterPressed() + { + if (Event.current.type != EventType.KeyDown) + return; + + enterPressed = Event.current.keyCode == KeyCode.Return || Event.current.keyCode == KeyCode.KeypadEnter; + if (enterPressed) + Event.current.Use(); + } + + private void OnGUILogin() + { + EditorGUI.BeginDisabledGroup(isBusy); + { + ShowMessage(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + username = EditorGUILayout.TextField(UsernameLabel, username, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + password = EditorGUILayout.PasswordField(PasswordLabel, password, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + EditorGUILayout.Space(); + + ShowErrorMessage(); + + GUILayout.Space(Styles.BaseSpacing + 3); + 
GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button(LoginButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + isBusy = true; + AuthenticationService.Login(username, password, DoRequire2fa, DoResult); + } + } + GUILayout.EndHorizontal(); + + if (OAuthCallbackManager.IsRunning) + { + GUILayout.Space(Styles.BaseSpacing + 3); + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button("Sign in with your browser", Styles.HyperlinkStyle)) + { + GUI.FocusControl(null); + Application.OpenURL(oAuthOpenUrl); + } + } + GUILayout.EndHorizontal(); + } + } + EditorGUI.EndDisabledGroup(); + } + + private void OnGUI2FA() + { + GUILayout.BeginVertical(); + { + GUILayout.Label(TwofaTitle, EditorStyles.boldLabel); + GUILayout.Label(TwofaDescription, EditorStyles.wordWrappedLabel); + + EditorGUI.BeginDisabledGroup(isBusy); + { + EditorGUILayout.Space(); + two2fa = EditorGUILayout.TextField(TwofaLabel, two2fa, Styles.TextFieldStyle); + EditorGUILayout.Space(); + ShowErrorMessage(); + + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button(BackButton)) + { + GUI.FocusControl(null); + Clear(); + } + + if (GUILayout.Button(TwofaButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + isBusy = true; + AuthenticationService.LoginWith2fa(two2fa); + } + } + GUILayout.EndHorizontal(); + + EditorGUILayout.Space(); + } + EditorGUI.EndDisabledGroup(); + } + GUILayout.EndVertical(); + } + + private void OnOAuthCallback(string state, string code) + { + if (state.Equals(oAuthState)) + { + isBusy = true; + authenticationService.LoginWithOAuthCode(code, (b, s) => TaskManager.RunInUI(() => DoOAuthCodeResult(b, s))); + } + } + + private void DoRequire2fa(string msg) + { + need2fa = true; + errorMessage = msg; + isBusy = false; + Redraw(); + } + + private void Clear() + { + need2fa = false; + errorMessage = null; + isBusy = false; + Redraw(); + } + + private void DoResult(bool success, 
string msg) + { + isBusy = false; + if (success) + { + UsageTracker.IncrementAuthenticationViewButtonAuthentication(); + + Clear(); + Finish(true); + } + else + { + errorMessage = msg; + Redraw(); + } + } + + private void DoOAuthCodeResult(bool success, string msg) + { + isBusy = false; + if (success) + { + UsageTracker.IncrementAuthenticationViewButtonAuthentication(); + + Clear(); + Finish(true); + } + else + { + errorMessage = msg; + Redraw(); + } + } + + private void ShowMessage() + { + if (message != null) + { + EditorGUILayout.HelpBox(message, MessageType.Warning); + } + } + + private void ShowErrorMessage() + { + if (errorMessage != null) + { + EditorGUILayout.HelpBox(errorMessage, MessageType.Error); + } + } + + private AuthenticationService AuthenticationService + { + get + { + if (authenticationService == null) + { + AuthenticationService = new AuthenticationService(UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri), Platform.Keychain, Manager.ProcessManager, Manager.TaskManager, Environment); + } + return authenticationService; + } + set + { + authenticationService = value; + } + } + + public override bool IsBusy + { + get { return isBusy; } + } + } +} diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubEnterpriseAuthenticationView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubEnterpriseAuthenticationView.cs new file mode 100644 index 000000000..29f2e93e7 --- /dev/null +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/GitHubEnterpriseAuthenticationView.cs @@ -0,0 +1,467 @@ +using System; +using System.Threading; +using UnityEngine; +using UnityEditor; + +namespace GitHub.Unity +{ + [Serializable] + class GitHubEnterpriseAuthenticationView : Subview + { + private static readonly Vector2 viewSize = new Vector2(290, 290); + + private const string CredentialsNeedRefreshMessage = "We've detected that your stored credentials are out of sync with your current user. 
This can happen if you have signed in to git outside of Unity. Sign in again to refresh your credentials."; + private const string NeedAuthenticationMessage = "We need you to authenticate first"; + private const string WindowTitle = "Authenticate"; + private const string ServerAddressLabel = "Server Address"; + private const string TokenLabel = "Token"; + private const string UsernameLabel = "Username"; + private const string PasswordLabel = "Password"; + private const string TwofaLabel = "2FA Code"; + private const string LoginButton = "Sign in"; + private const string BackButton = "Back"; + private const string AuthTitle = "Sign in to GitHub Enterprise"; + private const string TwofaTitle = "Two-Factor Authentication"; + private const string TwofaDescription = "Open the two-factor authentication app on your device to view your 2FA code and verify your identity."; + private const string TwofaButton = "Verify"; + + [SerializeField] private Vector2 scroll; + [SerializeField] private string serverAddress = string.Empty; + [SerializeField] private string username = string.Empty; + [SerializeField] private string two2fa = string.Empty; + [SerializeField] private string message; + [SerializeField] private string errorMessage; + [SerializeField] private bool need2fa; + [SerializeField] private bool hasServerMeta; + [SerializeField] private bool verifiablePasswordAuthentication; + + [NonSerialized] private bool isBusy; + [NonSerialized] private bool enterPressed; + [NonSerialized] private string password = string.Empty; + [NonSerialized] private string token = string.Empty; + [NonSerialized] private AuthenticationService authenticationService; + [NonSerialized] private string oAuthState; + [NonSerialized] private string oAuthOpenUrl; + + public override void InitializeView(IView parent) + { + base.InitializeView(parent); + need2fa = isBusy = false; + message = errorMessage = null; + Title = WindowTitle; + Size = viewSize; + + OAuthCallbackManager.OnCallback += 
OnOAuthCallback; + } + + public void Initialize(Exception exception) + { + var usernameMismatchException = exception as TokenUsernameMismatchException; + if (usernameMismatchException != null) + { + message = CredentialsNeedRefreshMessage; + username = usernameMismatchException.CachedUsername; + } + + var keychainEmptyException = exception as KeychainEmptyException; + if (keychainEmptyException != null) + { + message = NeedAuthenticationMessage; + } + + if (usernameMismatchException == null && keychainEmptyException == null) + { + message = exception.Message; + } + } + + public override void OnGUI() + { + HandleEnterPressed(); + + EditorGUIUtility.labelWidth = 90f; + + scroll = GUILayout.BeginScrollView(scroll); + { + GUILayout.BeginHorizontal(Styles.AuthHeaderBoxStyle); + { + GUILayout.Label(AuthTitle, Styles.HeaderRepoLabelStyle); + } + GUILayout.EndHorizontal(); + + GUILayout.BeginVertical(); + { + if (!hasServerMeta) + { + OnGUIHost(); + } + else + { + EditorGUILayout.Space(); + + EditorGUI.BeginDisabledGroup(true); + { + GUILayout.BeginHorizontal(); + { + serverAddress = EditorGUILayout.TextField(ServerAddressLabel, serverAddress, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + } + EditorGUI.EndDisabledGroup(); + + if (!need2fa) + { + if (verifiablePasswordAuthentication) + { + OnGUIUserPasswordLogin(); + } + else + { + OnGUITokenLogin(); + } + + if (OAuthCallbackManager.IsRunning) + { + GUILayout.Space(Styles.BaseSpacing + 3); + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button("Sign in with your browser", Styles.HyperlinkStyle)) + { + GUI.FocusControl(null); + Application.OpenURL(oAuthOpenUrl); + } + } + GUILayout.EndHorizontal(); + } + } + else + { + OnGUI2FA(); + } + } + } + + GUILayout.EndVertical(); + } + GUILayout.EndScrollView(); + } + + private void HandleEnterPressed() + { + if (Event.current.type != EventType.KeyDown) + return; + + enterPressed = Event.current.keyCode == KeyCode.Return || 
Event.current.keyCode == KeyCode.KeypadEnter; + if (enterPressed) + Event.current.Use(); + } + + private void OnGUIHost() + { + EditorGUI.BeginDisabledGroup(isBusy); + { + ShowMessage(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + serverAddress = EditorGUILayout.TextField(ServerAddressLabel, serverAddress, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + ShowErrorMessage(); + + GUILayout.Space(Styles.BaseSpacing + 3); + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button(LoginButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + errorMessage = null; + isBusy = true; + + GetAuthenticationService(serverAddress) + .GetServerMeta(DoServerMetaResult, DoServerMetaError); + + Redraw(); + } + } + GUILayout.EndHorizontal(); + } + EditorGUI.EndDisabledGroup(); + } + + private void OnGUIUserPasswordLogin() + { + EditorGUI.BeginDisabledGroup(isBusy); + { + ShowMessage(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + username = EditorGUILayout.TextField(UsernameLabel ,username, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + password = EditorGUILayout.PasswordField(PasswordLabel, password, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + ShowErrorMessage(); + + GUILayout.Space(Styles.BaseSpacing + 3); + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button("Back")) + { + GUI.FocusControl(null); + BackToGetServerMeta(); + } + + if (GUILayout.Button(LoginButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + isBusy = true; + GetAuthenticationService(serverAddress) + .Login(username, password, DoRequire2fa, DoResult); + } + } + GUILayout.EndHorizontal(); + } + EditorGUI.EndDisabledGroup(); + } + + private void BackToGetServerMeta() + { + hasServerMeta = false; + oAuthOpenUrl = null; + oAuthState = null; + Redraw(); + } + + private void 
OnGUITokenLogin() + { + EditorGUI.BeginDisabledGroup(isBusy); + { + ShowMessage(); + + EditorGUILayout.Space(); + + GUILayout.BeginHorizontal(); + { + token = EditorGUILayout.TextField(TokenLabel, token, Styles.TextFieldStyle); + } + GUILayout.EndHorizontal(); + + ShowErrorMessage(); + + GUILayout.Space(Styles.BaseSpacing + 3); + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button("Back")) + { + GUI.FocusControl(null); + BackToGetServerMeta(); + } + + if (GUILayout.Button(LoginButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + isBusy = true; + GetAuthenticationService(serverAddress) + .LoginWithToken(token, DoTokenResult); + } + } + GUILayout.EndHorizontal(); + } + EditorGUI.EndDisabledGroup(); + } + + private void OnGUI2FA() + { + GUILayout.BeginVertical(); + { + GUILayout.Label(TwofaTitle, EditorStyles.boldLabel); + GUILayout.Label(TwofaDescription, EditorStyles.wordWrappedLabel); + + EditorGUI.BeginDisabledGroup(isBusy); + { + EditorGUILayout.Space(); + two2fa = EditorGUILayout.TextField(TwofaLabel, two2fa, Styles.TextFieldStyle); + + ShowErrorMessage(); + + GUILayout.BeginHorizontal(); + { + GUILayout.FlexibleSpace(); + if (GUILayout.Button(BackButton)) + { + GUI.FocusControl(null); + Clear(); + } + + if (GUILayout.Button(TwofaButton) || (!isBusy && enterPressed)) + { + GUI.FocusControl(null); + isBusy = true; + GetAuthenticationService(serverAddress) + .LoginWith2fa(two2fa); + } + } + GUILayout.EndHorizontal(); + + EditorGUILayout.Space(); + } + EditorGUI.EndDisabledGroup(); + } + GUILayout.EndVertical(); + } + + private void OnOAuthCallback(string state, string code) + { + TaskManager.RunInUI(() => { + if (state.Equals(oAuthState)) + { + isBusy = true; + authenticationService.LoginWithOAuthCode(code, (b, s) => TaskManager.RunInUI(() => DoOAuthCodeResult(b, s))); + } + }); + } + + private void DoServerMetaResult(GitHubHostMeta gitHubHostMeta) + { + hasServerMeta = true; + verifiablePasswordAuthentication = 
gitHubHostMeta.VerifiablePasswordAuthentication; + isBusy = false; + Redraw(); + } + + private void DoServerMetaError(string message) + { + errorMessage = message; + hasServerMeta = false; + isBusy = false; + Redraw(); + } + + private void DoRequire2fa(string msg) + { + need2fa = true; + errorMessage = msg; + isBusy = false; + Redraw(); + } + + private void Clear() + { + need2fa = false; + errorMessage = null; + isBusy = false; + Redraw(); + } + + private void DoResult(bool success, string msg) + { + isBusy = false; + if (success) + { + UsageTracker.IncrementAuthenticationViewButtonAuthentication(); + + Clear(); + Finish(true); + } + else + { + errorMessage = msg; + Redraw(); + } + } + + private void DoTokenResult(bool success) + { + isBusy = false; + if (success) + { + UsageTracker.IncrementAuthenticationViewButtonAuthentication(); + + Clear(); + Finish(true); + } + else + { + errorMessage = "Error validating token."; + Redraw(); + } + } + + private void DoOAuthCodeResult(bool success, string msg) + { + isBusy = false; + if (success) + { + UsageTracker.IncrementAuthenticationViewButtonAuthentication(); + + Clear(); + Finish(true); + } + else + { + errorMessage = msg; + Redraw(); + } + } + + private void ShowMessage() + { + if (message != null) + { + EditorGUILayout.HelpBox(message, MessageType.Warning); + } + } + + private void ShowErrorMessage() + { + if (errorMessage != null) + { + EditorGUILayout.Space(); + + EditorGUILayout.HelpBox(errorMessage, MessageType.Error); + } + } + + private AuthenticationService GetAuthenticationService(string host) + { + if (authenticationService == null || authenticationService.HostAddress.WebUri.Host != host) + { + authenticationService + = new AuthenticationService( + host, + Platform.Keychain, + Manager.ProcessManager, + Manager.TaskManager, + Environment); + + oAuthState = Guid.NewGuid().ToString(); + oAuthOpenUrl = authenticationService.GetLoginUrl(oAuthState).ToString(); + } + + return authenticationService; + } + + public 
override bool IsBusy + { + get { return isBusy; } + } + } +} diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/HistoryView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/HistoryView.cs index 69b42e83e..c4b326c1e 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/HistoryView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/HistoryView.cs @@ -7,7 +7,7 @@ namespace GitHub.Unity { [Serializable] - class HistoryControl + public class HistoryControl { private const string HistoryEntryDetailFormat = "{0} {1}"; @@ -302,90 +302,92 @@ public void ScrollTo(int index, float offset = 0f) } } - [Serializable] - class HistoryView : Subview + abstract class HistoryBase : Subview { - private const string CommitDetailsTitle = "Commit details"; - private const string ClearSelectionButton = "×"; - - [SerializeField] private bool currentLogHasUpdate; - [SerializeField] private bool currentTrackingStatusHasUpdate; + protected const string CommitDetailsTitle = "Commit details"; + protected const string ClearSelectionButton = "×"; - [SerializeField] private HistoryControl historyControl; - [SerializeField] private GitLogEntry selectedEntry = GitLogEntry.Default; - - [SerializeField] private Vector2 detailsScroll; - - [SerializeField] private List logEntries = new List(); - - [SerializeField] private int statusAhead; - - [SerializeField] private ChangesTree treeChanges = new ChangesTree { IsSelectable = false, DisplayRootNode = false }; - - [SerializeField] private CacheUpdateEvent lastLogChangedEvent; - [SerializeField] private CacheUpdateEvent lastTrackingStatusChangedEvent; + protected abstract HistoryControl HistoryControl { get; set; } + protected abstract GitLogEntry SelectedEntry { get; set; } + protected abstract ChangesTree TreeChanges { get; set; } + protected abstract Vector2 DetailsScroll { get; set; } - public override void OnEnable() + protected void BuildHistoryControl(int loadAhead, List gitLogEntries) { - base.OnEnable(); - - if 
(treeChanges != null) + if (HistoryControl == null) { - treeChanges.ViewHasFocus = HasFocus; - treeChanges.UpdateIcons(Styles.FolderIcon); + HistoryControl = new HistoryControl(); } - AttachHandlers(Repository); - ValidateCachedData(Repository); + HistoryControl.Load(loadAhead, gitLogEntries); + if (!SelectedEntry.Equals(GitLogEntry.Default) + && SelectedEntry.CommitID != HistoryControl.SelectedGitLogEntry.CommitID) + { + SelectedEntry = GitLogEntry.Default; + } } - public override void OnDisable() + protected void BuildTreeChanges() { - base.OnDisable(); - DetachHandlers(Repository); + TreeChanges.PathSeparator = Environment.FileSystem.DirectorySeparatorChar.ToString(); + TreeChanges.Load(SelectedEntry.changes.Select(entry => new GitStatusEntryTreeData(entry))); + Redraw(); } - public override void Refresh() + protected void RevertCommit() { - base.Refresh(); - Refresh(CacheType.GitLog); - Refresh(CacheType.GitAheadBehind); - } + var dialogTitle = "Revert commit"; + var dialogBody = string.Format(@"Are you sure you want to revert the following commit:""{0}""", SelectedEntry.Summary); - public override void OnDataUpdate() - { - base.OnDataUpdate(); - MaybeUpdateData(); + if (EditorUtility.DisplayDialog(dialogTitle, dialogBody, "Revert", "Cancel")) + { + Repository + .Revert(SelectedEntry.CommitID) + .FinallyInUI((success, e) => { + if (!success) + { + EditorUtility.DisplayDialog(dialogTitle, + "Error reverting commit: " + e.Message, Localization.Cancel); + } + AssetDatabase.Refresh(); + }) + .Start(); + } } - public override void OnFocusChanged() + protected void HistoryDetailsEntry(GitLogEntry entry) { - base.OnFocusChanged(); - var hasFocus = HasFocus; - if (treeChanges.ViewHasFocus != hasFocus) - { - treeChanges.ViewHasFocus = hasFocus; - Redraw(); - } + GUILayout.BeginVertical(Styles.HeaderBoxStyle); + GUILayout.Label(entry.Summary, Styles.HistoryDetailsTitleStyle); + + GUILayout.Space(-5); + + GUILayout.BeginHorizontal(); + 
GUILayout.Label(entry.PrettyTimeString, Styles.HistoryDetailsMetaInfoStyle); + GUILayout.Label(entry.AuthorName, Styles.HistoryDetailsMetaInfoStyle); + GUILayout.FlexibleSpace(); + GUILayout.EndHorizontal(); + + GUILayout.Space(3); + GUILayout.EndVertical(); } - public override void OnGUI() + protected void DoHistoryGui(Rect rect, Action historyControlRightClick = null, + Action changesTreeRightClick = null) { - var rect = GUILayoutUtility.GetLastRect(); - if (historyControl != null) + if (HistoryControl != null) { var historyControlRect = new Rect(0f, 0f, Position.width, Position.height - rect.height); - var requiresRepaint = historyControl.Render(historyControlRect, - entry => { - selectedEntry = entry; - BuildTree(); + var requiresRepaint = HistoryControl.Render(historyControlRect, + singleClick: entry => { + SelectedEntry = entry; + BuildTreeChanges(); }, - entry => { }, entry => { - GenericMenu menu = new GenericMenu(); - menu.AddItem(new GUIContent("Revert"), false, RevertCommit); - menu.ShowAsContext(); - }); + doubleClick: entry => { + + }, + rightClick: historyControlRightClick); if (requiresRepaint) Redraw(); @@ -393,27 +395,28 @@ public override void OnGUI() DoProgressGUI(); - if (!selectedEntry.Equals(GitLogEntry.Default)) + if (!SelectedEntry.Equals(GitLogEntry.Default)) { // Top bar for scrolling to selection or clearing it GUILayout.BeginHorizontal(EditorStyles.toolbar); { if (GUILayout.Button(CommitDetailsTitle, Styles.ToolbarButtonStyle)) { - historyControl.ScrollTo(historyControl.SelectedIndex); + HistoryControl.ScrollTo(HistoryControl.SelectedIndex); } + if (GUILayout.Button(ClearSelectionButton, Styles.ToolbarButtonStyle, GUILayout.ExpandWidth(false))) { - selectedEntry = GitLogEntry.Default; - historyControl.SelectedIndex = -1; + SelectedEntry = GitLogEntry.Default; + HistoryControl.SelectedIndex = -1; } } GUILayout.EndHorizontal(); // Log entry details - including changeset tree (if any changes are found) - detailsScroll = 
GUILayout.BeginScrollView(detailsScroll, GUILayout.Height(250)); + DetailsScroll = GUILayout.BeginScrollView(DetailsScroll, GUILayout.Height(250)); { - HistoryDetailsEntry(selectedEntry); + HistoryDetailsEntry(SelectedEntry); GUILayout.Space(EditorGUIUtility.standardVerticalSpacing); GUILayout.Label("Files changed", EditorStyles.boldLabel); @@ -424,24 +427,23 @@ public override void OnGUI() GUILayout.BeginVertical(); { var borderLeft = Styles.Label.margin.left; - var treeControlRect = new Rect(rect.x + borderLeft, rect.y, Position.width - borderLeft * 2, Position.height - rect.height + Styles.CommitAreaPadding); + var treeControlRect = new Rect(rect.x + borderLeft, rect.y, Position.width - borderLeft * 2, + Position.height - rect.height + Styles.CommitAreaPadding); var treeRect = new Rect(0f, 0f, 0f, 0f); - if (treeChanges != null) + if (TreeChanges != null) { - treeChanges.FolderStyle = Styles.Foldout; - treeChanges.TreeNodeStyle = Styles.TreeNode; - treeChanges.ActiveTreeNodeStyle = Styles.ActiveTreeNode; - treeChanges.FocusedTreeNodeStyle = Styles.FocusedTreeNode; - treeChanges.FocusedActiveTreeNodeStyle = Styles.FocusedActiveTreeNode; - - treeRect = treeChanges.Render(treeControlRect, detailsScroll, - node => { }, - node => { - }, - node => { - }); - - if (treeChanges.RequiresRepaint) + TreeChanges.FolderStyle = Styles.Foldout; + TreeChanges.TreeNodeStyle = Styles.TreeNode; + TreeChanges.ActiveTreeNodeStyle = Styles.ActiveTreeNode; + TreeChanges.FocusedTreeNodeStyle = Styles.FocusedTreeNode; + TreeChanges.FocusedActiveTreeNodeStyle = Styles.FocusedActiveTreeNode; + + treeRect = TreeChanges.Render(treeControlRect, DetailsScroll, + singleClick: node => { }, + doubleClick: node => { }, + rightClick: changesTreeRightClick); + + if (TreeChanges.RequiresRepaint) Redraw(); } @@ -456,43 +458,74 @@ public override void OnGUI() } } - private void HistoryDetailsEntry(GitLogEntry entry) + public override void OnEnable() { - GUILayout.BeginVertical(Styles.HeaderBoxStyle); - 
GUILayout.Label(entry.Summary, Styles.HistoryDetailsTitleStyle); + base.OnEnable(); - GUILayout.Space(-5); + if (TreeChanges != null) + { + TreeChanges.ViewHasFocus = HasFocus; + TreeChanges.UpdateIcons(Styles.FolderIcon); + } - GUILayout.BeginHorizontal(); - GUILayout.Label(entry.PrettyTimeString, Styles.HistoryDetailsMetaInfoStyle); - GUILayout.Label(entry.AuthorName, Styles.HistoryDetailsMetaInfoStyle); - GUILayout.FlexibleSpace(); - GUILayout.EndHorizontal(); + AttachHandlers(Repository); + ValidateCachedData(Repository); + } - GUILayout.Space(3); - GUILayout.EndVertical(); + public override void OnDisable() + { + base.OnDisable(); + DetachHandlers(Repository); } - private void RevertCommit() + public override void OnDataUpdate() { - var dialogTitle = "Revert commit"; - var dialogBody = string.Format(@"Are you sure you want to revert the following commit:""{0}""", selectedEntry.Summary); + base.OnDataUpdate(); + MaybeUpdateData(); + } - if (EditorUtility.DisplayDialog(dialogTitle, dialogBody, "Revert", "Cancel")) + public override void OnFocusChanged() + { + base.OnFocusChanged(); + var hasFocus = HasFocus; + if (TreeChanges.ViewHasFocus != hasFocus) { - Repository - .Revert(selectedEntry.CommitID) - .FinallyInUI((success, e) => { - if (!success) - { - EditorUtility.DisplayDialog(dialogTitle, - "Error reverting commit: " + e.Message, Localization.Cancel); - } - }) - .Start(); + TreeChanges.ViewHasFocus = hasFocus; + Redraw(); } } + protected abstract void AttachHandlers(IRepository repository); + protected abstract void DetachHandlers(IRepository repository); + protected abstract void ValidateCachedData(IRepository repository); + protected abstract void MaybeUpdateData(); + } + + [Serializable] + class HistoryView : HistoryBase + { + [SerializeField] private bool currentLogHasUpdate; + [SerializeField] private bool currentTrackingStatusHasUpdate; + + [SerializeField] private List logEntries = new List(); + + [SerializeField] private int statusAhead; + + 
[SerializeField] private CacheUpdateEvent lastLogChangedEvent; + [SerializeField] private CacheUpdateEvent lastTrackingStatusChangedEvent; + + [SerializeField] private HistoryControl historyControl; + [SerializeField] private GitLogEntry selectedEntry = GitLogEntry.Default; + [SerializeField] private ChangesTree treeChanges = new ChangesTree { DisplayRootNode = false }; + [SerializeField] private Vector2 detailsScroll; + + public override void Refresh() + { + base.Refresh(); + Refresh(CacheType.GitLog); + Refresh(CacheType.GitAheadBehind); + } + private void RepositoryOnTrackingStatusChanged(CacheUpdateEvent cacheUpdateEvent) { if (!lastTrackingStatusChangedEvent.Equals(cacheUpdateEvent)) @@ -515,7 +548,7 @@ private void RepositoryOnLogChanged(CacheUpdateEvent cacheUpdateEvent) } } - private void AttachHandlers(IRepository repository) + protected override void AttachHandlers(IRepository repository) { if (repository == null) { @@ -526,7 +559,7 @@ private void AttachHandlers(IRepository repository) repository.LogChanged += RepositoryOnLogChanged; } - private void DetachHandlers(IRepository repository) + protected override void DetachHandlers(IRepository repository) { if (repository == null) { @@ -537,13 +570,13 @@ private void DetachHandlers(IRepository repository) repository.LogChanged -= RepositoryOnLogChanged; } - private void ValidateCachedData(IRepository repository) + protected override void ValidateCachedData(IRepository repository) { repository.CheckAndRaiseEventsIfCacheNewer(CacheType.GitLog, lastLogChangedEvent); repository.CheckAndRaiseEventsIfCacheNewer(CacheType.GitAheadBehind, lastTrackingStatusChangedEvent); } - private void MaybeUpdateData() + protected override void MaybeUpdateData() { if (Repository == null) { @@ -563,30 +596,205 @@ private void MaybeUpdateData() logEntries = Repository.CurrentLog; - BuildHistoryControl(); + BuildHistoryControl(statusAhead, logEntries); + } + } + + public override void OnGUI() + { + var lastRect = 
GUILayoutUtility.GetLastRect();
+ DoHistoryGui(lastRect, entry => {
+ GenericMenu menu = new GenericMenu();
+ menu.AddItem(new GUIContent("Revert"), false, RevertCommit);
+ menu.ShowAsContext();
+ }, node => {
+ var menu = CreateChangesTreeContextMenu(node);
+ menu.ShowAsContext();
+ });
+ }
+
+ protected override HistoryControl HistoryControl
+ {
+ get { return historyControl; }
+ set { historyControl = value; }
+ }
+
+ protected override GitLogEntry SelectedEntry
+ {
+ get { return selectedEntry; }
+ set { selectedEntry = value; }
+ }
+
+ protected override ChangesTree TreeChanges
+ {
+ get { return treeChanges; }
+ set { treeChanges = value; }
+ }
+
+ protected override Vector2 DetailsScroll
+ {
+ get { return detailsScroll; }
+ set { detailsScroll = value; }
+ }
+
+ private GenericMenu CreateChangesTreeContextMenu(ChangesTreeNode node)
+ {
+ var genericMenu = new GenericMenu();
+
+ genericMenu.AddItem(new GUIContent("Show History"), false, () => { });
+
+ return genericMenu;
+ }
+ }
+
+ [Serializable]
+ class FileHistoryView : HistoryBase
+ {
+ [SerializeField] private bool currentFileLogHasUpdate;
+ [SerializeField] private bool currentStatusEntriesHasUpdate;
+
+ [SerializeField] private GitFileLog gitFileLog;
+
+ [SerializeField] private HistoryControl historyControl;
+ [SerializeField] private GitLogEntry selectedEntry = GitLogEntry.Default;
+ [SerializeField] private ChangesTree treeChanges = new ChangesTree { DisplayRootNode = false };
+ [SerializeField] private Vector2 detailsScroll;
+ [SerializeField] private List gitStatusEntries = new List();
+
+ [SerializeField] private CacheUpdateEvent lastStatusEntriesChangedEvent;
+ [SerializeField] private CacheUpdateEvent lastFileLogChangedEvent;
+
+ public override void Refresh()
+ {
+ base.Refresh();
+ Refresh(CacheType.GitFileLog);
+ Refresh(CacheType.GitAheadBehind);
+ }
+
+ private void RepositoryOnFileLogChanged(CacheUpdateEvent cacheUpdateEvent)
+ {
+ if (!lastFileLogChangedEvent.Equals(cacheUpdateEvent))
+ {
+ ReceivedEvent(cacheUpdateEvent.cacheType);
+ lastFileLogChangedEvent = cacheUpdateEvent;
+ currentFileLogHasUpdate = true;
+ Redraw();
 }
 }

- private void BuildHistoryControl()
+ private void RepositoryOnStatusEntriesChanged(CacheUpdateEvent cacheUpdateEvent)
 {
- if (historyControl == null)
+ if (!lastStatusEntriesChangedEvent.Equals(cacheUpdateEvent))
 {
- historyControl = new HistoryControl();
+ ReceivedEvent(cacheUpdateEvent.cacheType);
+ lastStatusEntriesChangedEvent = cacheUpdateEvent;
+ currentStatusEntriesHasUpdate = true;
+ Redraw();
 }
+ }

- historyControl.Load(statusAhead, logEntries);
- if (!selectedEntry.Equals(GitLogEntry.Default)
- && selectedEntry.CommitID != historyControl.SelectedGitLogEntry.CommitID)
+ protected override void AttachHandlers(IRepository repository)
+ {
+ if (repository == null)
 {
- selectedEntry = GitLogEntry.Default;
+ return;
 }
+
+ repository.FileLogChanged += RepositoryOnFileLogChanged;
+ repository.StatusEntriesChanged += RepositoryOnStatusEntriesChanged;
 }

- private void BuildTree()
+ protected override void DetachHandlers(IRepository repository)
 {
- treeChanges.PathSeparator = Environment.FileSystem.DirectorySeparatorChar.ToString();
- treeChanges.Load(selectedEntry.changes.Select(entry => new GitStatusEntryTreeData(entry)));
- Redraw();
+ if (repository == null)
+ {
+ return;
+ }
+
+ repository.FileLogChanged -= RepositoryOnFileLogChanged;
+ repository.StatusEntriesChanged -= RepositoryOnStatusEntriesChanged;
+ }
+
+ protected override void ValidateCachedData(IRepository repository)
+ {
+ repository.CheckAndRaiseEventsIfCacheNewer(CacheType.GitFileLog, lastFileLogChangedEvent);
+ }
+
+ protected override void MaybeUpdateData()
+ {
+ if (Repository == null)
+ {
+ return;
+ }
+
+ if (currentFileLogHasUpdate)
+ {
+ currentFileLogHasUpdate = false;
+
+ gitFileLog = Repository.CurrentFileLog;
+
+ BuildHistoryControl(0, gitFileLog.LogEntries);
+ }
+
+ if (currentStatusEntriesHasUpdate)
+ {
+ currentStatusEntriesHasUpdate = false;
+
+ 
gitStatusEntries = Repository.CurrentChanges; + } + } + + public override void OnGUI() + { + var lastRect = GUILayoutUtility.GetLastRect(); + DoHistoryGui(lastRect, entry => { + GenericMenu menu = new GenericMenu(); + string checkoutPrompt = string.Format("Checkout revision {0}", entry.ShortID); + menu.AddItem(new GUIContent(checkoutPrompt), false, () => Checkout(entry.commitID)); + menu.ShowAsContext(); + }, node => { + }); + } + + protected override HistoryControl HistoryControl + { + get { return historyControl; } + set { historyControl = value; } + } + + protected override GitLogEntry SelectedEntry + { + get { return selectedEntry; } + set { selectedEntry = value; } + } + + protected override ChangesTree TreeChanges + { + get { return treeChanges; } + set { treeChanges = value; } + } + + protected override Vector2 DetailsScroll + { + get { return detailsScroll; } + set { detailsScroll = value; } + } + + private const string ConfirmCheckoutTitle = "Discard Changes?"; + private const string ConfirmCheckoutMessage = "You've made changes to file '{0}'. 
Overwrite these changes with the historical version?"; + private const string ConfirmCheckoutOK = "Overwrite"; + private const string ConfirmCheckoutCancel = "Cancel"; + + protected void Checkout(string commitId) + { + var promptUser = gitStatusEntries.Count > 0 && gitStatusEntries.Any(statusEntry => gitFileLog.Path.Equals(statusEntry.Path.ToNPath())); + + if (!promptUser || EditorUtility.DisplayDialog(ConfirmCheckoutTitle, string.Format(ConfirmCheckoutMessage, gitFileLog.Path), ConfirmCheckoutOK, ConfirmCheckoutCancel)) + { + Repository.CheckoutVersion(commitId, new string[] { gitFileLog.Path }) + .ThenInUI(AssetDatabase.Refresh) + .Start(); + } } } } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LfsLocksModificationProcessor.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LfsLocksModificationProcessor.cs index a4bf74cb9..cbdec46e8 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LfsLocksModificationProcessor.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LfsLocksModificationProcessor.cs @@ -2,6 +2,7 @@ using System.Linq; using GitHub.Logging; using UnityEditor; +using UnityEngine; namespace GitHub.Unity { @@ -21,8 +22,12 @@ public static void Initialize(IEnvironment env, IPlatform plat) environment = env; platform = plat; platform.Keychain.ConnectionsChanged += UserMayHaveChanged; + // we need to do this to get the initial user information up front + UserMayHaveChanged(); repository = environment.Repository; + UnityShim.Editor_finishedDefaultHeaderGUI += InspectorHeaderFinished; + if (repository != null) { repository.LocksChanged += RepositoryOnLocksChanged; @@ -37,19 +42,27 @@ public static string[] OnWillSaveAssets(string[] paths) public static AssetMoveResult OnWillMoveAsset(string oldPath, string newPath) { - return IsLocked(oldPath) || IsLocked(newPath) ? AssetMoveResult.FailedMove : AssetMoveResult.DidNotMove; + return IsLockedBySomeoneElse(oldPath) || IsLockedBySomeoneElse(newPath) ? 
AssetMoveResult.FailedMove : AssetMoveResult.DidNotMove; } public static AssetDeleteResult OnWillDeleteAsset(string assetPath, RemoveAssetOptions option) { - return IsLocked(assetPath) ? AssetDeleteResult.FailedDelete : AssetDeleteResult.DidNotDelete; + return IsLockedBySomeoneElse(assetPath) ? AssetDeleteResult.FailedDelete : AssetDeleteResult.DidNotDelete; } + // Returns true if this file can be edited by this user public static bool IsOpenForEdit(string assetPath, out string message) { var lck = GetLock(assetPath); - message = lck.HasValue ? "File is locked for editing by " + lck.Value.Owner : null; - return !lck.HasValue; + bool canEdit = true; + if (assetPath.EndsWith(".meta")) + { + canEdit &= !IsLockedBySomeoneElse(lck); + assetPath = assetPath.TrimEnd(".meta"); + } + canEdit &= !IsLockedBySomeoneElse(lck); + message = !canEdit ? string.Format("File is locked for editing by {0}", lck.Value.Owner.Name) : null; + return canEdit; } private static void RepositoryOnLocksChanged(CacheUpdateEvent cacheUpdateEvent) @@ -66,9 +79,14 @@ private static void UserMayHaveChanged() loggedInUser = platform.Keychain.Connections.Select(x => x.Username).FirstOrDefault(); } - private static bool IsLocked(string assetPath) + private static bool IsLockedBySomeoneElse(GitLock? lck) { - return GetLock(assetPath).HasValue; + return lck.HasValue && !lck.Value.Owner.Name.Equals(loggedInUser); + } + + private static bool IsLockedBySomeoneElse(string assetPath) + { + return IsLockedBySomeoneElse(GetLock(assetPath)); } private static GitLock? 
GetLock(string assetPath) @@ -78,9 +96,42 @@ private static bool IsLocked(string assetPath) GitLock lck; var repositoryPath = environment.GetRepositoryPath(assetPath.ToNPath()); - if (!locks.TryGetValue(repositoryPath, out lck) || lck.Owner.Name.Equals(loggedInUser)) - return null; - return lck; + if (locks.TryGetValue(repositoryPath, out lck)) + return lck; + return null; + } + + private static void InspectorHeaderFinished(Editor editor) + { + string message = ""; + if (!IsOpenForEdit(AssetDatabase.GetAssetPath(editor.target), out message)) + { + var enabled = GUI.enabled; + GUI.enabled = true; + GUILayout.BeginVertical(); + { + GUILayout.Space(9); + GUILayout.BeginHorizontal(); + { + GUILayout.BeginVertical(GUILayout.Width(32)); + { + GUILayout.Label(Utility.GetIcon("big-logo.png", "big-logo@2x.png", Utility.IsDarkTheme), GUILayout.Width(32), GUILayout.Height(32)); + } + GUILayout.EndVertical(); + + GUILayout.BeginVertical(); + { + GUILayout.Space(9); + GUILayout.Label(message, Styles.HeaderBranchLabelStyle); + } + GUILayout.EndVertical(); + } + GUILayout.EndHorizontal(); + } + GUILayout.EndVertical(); + GUI.enabled = enabled; + } + } } } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LocksView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LocksView.cs index 68208cc74..0b22ee253 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LocksView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/LocksView.cs @@ -119,7 +119,7 @@ public bool Render(Rect containingRect, Action singleClick = null, visibleItems[entry.GitLock.ID] = shouldRenderEntry; } - if (visibleItems[entry.GitLock.ID]) + if (visibleItems.ContainsKey(entry.GitLock.ID) && visibleItems[entry.GitLock.ID]) { entryRect = RenderEntry(entryRect, entry); } @@ -383,13 +383,14 @@ class LocksView : Subview { [NonSerialized] private bool isBusy; + [SerializeField] private bool currentRemoteHasUpdate; [SerializeField] private bool currentStatusEntriesHasUpdate; [SerializeField] 
private bool currentLocksHasUpdate; - [SerializeField] private bool currentUserHasUpdate; + [SerializeField] private bool keychainHasUpdate; [SerializeField] private LocksControl locksControl; + [SerializeField] private CacheUpdateEvent lastCurrentRemoteChangedEvent; [SerializeField] private CacheUpdateEvent lastLocksChangedEvent; [SerializeField] private CacheUpdateEvent lastStatusEntriesChangedEvent; - [SerializeField] private CacheUpdateEvent lastUserChangedEvent; [SerializeField] private List lockedFiles = new List(); [SerializeField] private List gitStatusEntries = new List(); [SerializeField] private string currentUsername; @@ -407,6 +408,7 @@ public override void OnEnable() AttachHandlers(Repository); ValidateCachedData(Repository); + KeychainConnectionsChanged(); } public override void OnDisable() @@ -523,9 +525,10 @@ private void AttachHandlers(IRepository repository) return; } + Platform.Keychain.ConnectionsChanged += KeychainConnectionsChanged; + repository.CurrentRemoteChanged += RepositoryOnCurrentRemoteChanged; repository.LocksChanged += RepositoryOnLocksChanged; - repository.LocksChanged += RepositoryOnStatusEntriesChanged; - User.Changed += UserOnChanged; + repository.StatusEntriesChanged += RepositoryOnStatusEntriesChanged; } private void DetachHandlers(IRepository repository) @@ -535,9 +538,20 @@ private void DetachHandlers(IRepository repository) return; } + Platform.Keychain.ConnectionsChanged -= KeychainConnectionsChanged; + repository.CurrentRemoteChanged -= RepositoryOnCurrentRemoteChanged; repository.LocksChanged -= RepositoryOnLocksChanged; - repository.LocksChanged -= RepositoryOnStatusEntriesChanged; - User.Changed -= UserOnChanged; + repository.StatusEntriesChanged -= RepositoryOnStatusEntriesChanged; + } + + private void RepositoryOnCurrentRemoteChanged(CacheUpdateEvent cacheUpdateEvent) + { + if (!lastCurrentRemoteChangedEvent.Equals(cacheUpdateEvent)) + { + lastCurrentRemoteChangedEvent = cacheUpdateEvent; + currentRemoteHasUpdate = 
true; + Redraw(); + } } private void RepositoryOnLocksChanged(CacheUpdateEvent cacheUpdateEvent) @@ -560,21 +574,17 @@ private void RepositoryOnStatusEntriesChanged(CacheUpdateEvent cacheUpdateEvent) } } - private void UserOnChanged(CacheUpdateEvent cacheUpdateEvent) + private void KeychainConnectionsChanged() { - if (!lastUserChangedEvent.Equals(cacheUpdateEvent)) - { - lastUserChangedEvent = cacheUpdateEvent; - currentUserHasUpdate = true; - Redraw(); - } + keychainHasUpdate = true; + Redraw(); } private void ValidateCachedData(IRepository repository) { + repository.CheckAndRaiseEventsIfCacheNewer(CacheType.RepositoryInfo, lastCurrentRemoteChangedEvent); repository.CheckAndRaiseEventsIfCacheNewer(CacheType.GitLocks, lastLocksChangedEvent); repository.CheckAndRaiseEventsIfCacheNewer(CacheType.GitStatus, lastStatusEntriesChangedEvent); - User.CheckAndRaiseEventsIfCacheNewer(CacheType.GitUser, lastUserChangedEvent); } private void MaybeUpdateData() @@ -584,15 +594,35 @@ private void MaybeUpdateData() return; } - if (currentUserHasUpdate) + if (keychainHasUpdate || currentRemoteHasUpdate) { - //TODO: ONE_USER_LOGIN This assumes only ever one user can login - var keychainConnection = Platform.Keychain.Connections.FirstOrDefault(); - if (keychainConnection != null) - currentUsername = keychainConnection.Username; - else - currentUsername = ""; - currentUserHasUpdate = false; + var username = String.Empty; + if (Repository != null) + { + Connection connection; + if (!string.IsNullOrEmpty(Repository.CloneUrl)) + { + var host = Repository.CloneUrl + .ToRepositoryUri() + .GetComponents(UriComponents.Host, UriFormat.SafeUnescaped); + + connection = Platform.Keychain.Connections.FirstOrDefault(x => x.Host == host); + } + else + { + connection = Platform.Keychain.Connections.FirstOrDefault(HostAddress.IsGitHubDotCom); + } + + if (connection != null) + { + username = connection.Username; + } + } + + currentUsername = username; + + keychainHasUpdate = false; + 
currentRemoteHasUpdate = false; } if (currentLocksHasUpdate) diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PopupWindow.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PopupWindow.cs index ea7cd5e6d..406a5e178 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PopupWindow.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PopupWindow.cs @@ -1,11 +1,13 @@ using System; +using System.Collections.Generic; +using System.Linq; using UnityEditor; using UnityEngine; namespace GitHub.Unity { [Serializable] - class PopupWindow : BaseWindow + public class PopupWindow : BaseWindow { public enum PopupViewType { @@ -14,8 +16,6 @@ public enum PopupViewType AuthenticationView, } - [NonSerialized] private IApiClient client; - [SerializeField] private PopupViewType activeViewType; [SerializeField] private AuthenticationView authenticationView; [SerializeField] private LoadingView loadingView; @@ -114,17 +114,35 @@ private void Open(PopupViewType popupViewType, Action onClose) OnClose = null; var viewNeedsAuthentication = popupViewType == PopupViewType.PublishView; + if (viewNeedsAuthentication) { - Client.GetCurrentUser(user => + var userHasAuthentication = false; + foreach (var keychainConnection in Platform.Keychain.Connections.OrderByDescending(HostAddress.IsGitHubDotCom)) + { + var apiClient = new ApiClient(Platform.Keychain, Platform.ProcessManager, TaskManager, + Environment, keychainConnection.Host); + + try + { + apiClient.EnsureValidCredentials(); + userHasAuthentication = true; + break; + } + catch (Exception ex) + { + Logger.Trace(ex, "Exception validating host {0}", keychainConnection.Host); + } + } + + if (userHasAuthentication) { OpenInternal(popupViewType, onClose); shouldCloseOnFinish = true; - - }, - exception => + } + else { - authenticationView.Initialize(exception); + authenticationView.Initialize(null); OpenInternal(PopupViewType.AuthenticationView, completedAuthentication => { if (completedAuthentication) @@ -132,8 +150,9 
@@ private void Open(PopupViewType popupViewType, Action onClose) Open(popupViewType, onClose); } }); + shouldCloseOnFinish = false; - }); + } } else { @@ -168,30 +187,6 @@ private void SwitchView(Subview fromView, Subview toView) Repaint(); } - public IApiClient Client - { - get - { - if (client == null) - { - var repository = Environment.Repository; - UriString host; - if (repository != null && !string.IsNullOrEmpty(repository.CloneUrl)) - { - host = repository.CloneUrl.ToRepositoryUrl(); - } - else - { - host = UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri); - } - - client = new ApiClient(host, Platform.Keychain, Manager.ProcessManager, TaskManager, Environment.NodeJsExecutablePath, Environment.OctorunScriptPath); - } - - return client; - } - } - private Subview ActiveView { get diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ProjectWindowInterface.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ProjectWindowInterface.cs index 9a3d66574..9e9a3547e 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ProjectWindowInterface.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/ProjectWindowInterface.cs @@ -18,6 +18,7 @@ class ProjectWindowInterface : AssetPostprocessor private static List locks = new List(); private static List guids = new List(); private static List guidsLocks = new List(); + private static string currentUsername; private static IApplicationManager manager; private static bool isBusy = false; @@ -25,8 +26,10 @@ class ProjectWindowInterface : AssetPostprocessor private static ILogging Logger { get { return logger = logger ?? LogHelper.GetLogger(); } } private static CacheUpdateEvent lastRepositoryStatusChangedEvent; private static CacheUpdateEvent lastLocksChangedEvent; + private static CacheUpdateEvent lastCurrentRemoteChangedEvent; private static IRepository Repository { get { return manager != null ? 
manager.Environment.Repository : null; } } - private static bool IsInitialized { get { return Repository != null && Repository.CurrentRemote.HasValue; } } + private static IPlatform Platform { get { return manager != null ? manager.Platform : null; } } + private static bool IsInitialized { get { return Repository != null; } } public static void Initialize(IApplicationManager theManager) { @@ -35,10 +38,14 @@ public static void Initialize(IApplicationManager theManager) manager = theManager; + Platform.Keychain.ConnectionsChanged += UpdateCurrentUsername; + UpdateCurrentUsername(); + if (IsInitialized) { Repository.StatusEntriesChanged += RepositoryOnStatusEntriesChanged; Repository.LocksChanged += RepositoryOnLocksChanged; + Repository.CurrentRemoteChanged += RepositoryOnCurrentRemoteChanged; ValidateCachedData(); } } @@ -83,11 +90,49 @@ private static void RepositoryOnLocksChanged(CacheUpdateEvent cacheUpdateEvent) } } + private static void RepositoryOnCurrentRemoteChanged(CacheUpdateEvent cacheUpdateEvent) + { + if (!lastCurrentRemoteChangedEvent.Equals(cacheUpdateEvent)) + { + lastCurrentRemoteChangedEvent = cacheUpdateEvent; + } + } + + private static void UpdateCurrentUsername() + { + var username = String.Empty; + if (Repository != null) + { + Connection connection; + if (!string.IsNullOrEmpty(Repository.CloneUrl)) + { + var host = Repository.CloneUrl + .ToRepositoryUri() + .GetComponents(UriComponents.Host, UriFormat.SafeUnescaped); + + connection = Platform.Keychain.Connections.FirstOrDefault(x => x.Host == host); + } + else + { + connection = Platform.Keychain.Connections.FirstOrDefault(HostAddress.IsGitHubDotCom); + } + + if (connection != null) + { + username = connection.Username; + } + } + + currentUsername = username; + } + [MenuItem(AssetsMenuRequestLock, true, 10000)] private static bool ContextMenu_CanLock() { if (!EnsureInitialized()) return false; + if (!Repository.CurrentRemote.HasValue) + return false; if (isBusy) return false; return 
Selection.objects.Any(IsObjectUnlocked); @@ -98,9 +143,11 @@ private static bool ContextMenu_CanUnlock() { if (!EnsureInitialized()) return false; + if (!Repository.CurrentRemote.HasValue) + return false; if (isBusy) return false; - return Selection.objects.Any(IsObjectLocked); + return Selection.objects.Any(f => IsObjectLocked(f , true)); } [MenuItem(AssetsMenuReleaseLockForced, true, 10002)] @@ -108,6 +155,8 @@ private static bool ContextMenu_CanUnlockForce() { if (!EnsureInitialized()) return false; + if (!Repository.CurrentRemote.HasValue) + return false; if (isBusy) return false; return Selection.objects.Any(IsObjectLocked); @@ -119,7 +168,7 @@ private static void ContextMenu_Lock() RunLockUnlock(IsObjectUnlocked, CreateLockObjectTask, Localization.RequestLockActionTitle, "Failed to lock: no permissions"); } - [MenuItem(AssetsMenuReleaseLockForced, false, 10001)] + [MenuItem(AssetsMenuReleaseLock, false, 10001)] private static void ContextMenu_Unlock() { RunLockUnlock(IsObjectLocked, x => CreateUnlockObjectTask(x, false), Localization.ReleaseLockActionTitle, "Failed to unlock: no permissions"); @@ -174,6 +223,11 @@ private static bool IsObjectUnlocked(Object selected) } private static bool IsObjectLocked(Object selected) + { + return IsObjectLocked(selected, false); + } + + private static bool IsObjectLocked(Object selected, bool isLockedByCurrentUser) { if (selected == null) return false; @@ -181,7 +235,7 @@ private static bool IsObjectLocked(Object selected) NPath assetPath = AssetDatabase.GetAssetPath(selected.GetInstanceID()).ToNPath(); NPath repositoryPath = manager.Environment.GetRepositoryPath(assetPath); - return locks.Any(x => repositoryPath == x.Path); + return locks.Any(x => repositoryPath == x.Path && (!isLockedByCurrentUser || x.Owner.Name == currentUsername)); } private static ITask CreateUnlockObjectTask(Object selected, bool force) @@ -216,7 +270,15 @@ private static void OnLocksUpdate() guidsLocks.Add(g); } - 
EditorApplication.RepaintProjectWindow(); + // https://github.com/github-for-unity/Unity/pull/959#discussion_r236694800 + // We need to repaint not only the project window, but also the inspector. + // so that we can show the "this thing is locked by X" and that the IsOpenForEdit call happens + // and the inspector is disabled. There's no way to refresh the editor directly + // (well, there is, but it's an internal api), so this just causes Unity to repaint everything. + // Nail, meet bazooka, unfortunately, but that's the only way to do it with public APIs ¯_(ツ)_/¯ + + //EditorApplication.RepaintProjectWindow(); + UnityEditorInternal.InternalEditorUtility.RepaintAllViews(); } private static void OnStatusUpdate() @@ -229,7 +291,7 @@ private static void OnStatusUpdate() guids.Add(guid); } - EditorApplication.RepaintProjectWindow(); + AssetDatabase.Refresh(); } private static void OnProjectWindowItemGUI(string guid, Rect itemRect) @@ -283,6 +345,7 @@ private static void OnProjectWindowItemGUI(string guid, Rect itemRect) { var scale = itemRect.height / 90f; var size = new Vector2(texture.width * scale, texture.height * scale); + size = size / EditorGUIUtility.pixelsPerPoint; var offset = new Vector2(itemRect.width * Mathf.Min(.4f * scale, .2f), itemRect.height * Mathf.Min(.2f * scale, .2f)); rect = new Rect(itemRect.center.x - size.x * .5f + offset.x, itemRect.center.y - size.y * .5f + offset.y, size.x, size.y); } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PublishView.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PublishView.cs index 65ef5fb71..832cfa7c7 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PublishView.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/PublishView.cs @@ -22,7 +22,10 @@ class PublishView : Subview private const string PublishLimitPrivateRepositoriesError = "You are currently at your limit of private repositories"; private const string PublishToGithubLabel = "Publish to GitHub"; - 
[SerializeField] private string username; + [SerializeField] private Connection[] connections; + [SerializeField] private string[] connectionLabels; + [SerializeField] private int selectedConnection; + [SerializeField] private string[] owners = { OwnersDefaultText }; [SerializeField] private string[] publishOwners; [SerializeField] private int selectedOwner; @@ -30,39 +33,18 @@ class PublishView : Subview [SerializeField] private string repoDescription = ""; [SerializeField] private bool togglePrivate; - [NonSerialized] private IApiClient client; + [NonSerialized] private Dictionary clients = new Dictionary(); + [NonSerialized] private IApiClient selectedClient; [NonSerialized] private bool isBusy; [NonSerialized] private string error; + [NonSerialized] private bool connectionsNeedLoading; [NonSerialized] private bool ownersNeedLoading; - public IApiClient Client - { - get - { - if (client == null) - { - var repository = Environment.Repository; - UriString host; - if (repository != null && !string.IsNullOrEmpty(repository.CloneUrl)) - { - host = repository.CloneUrl.ToRepositoryUrl(); - } - else - { - host = UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri); - } - - client = new ApiClient(host, Platform.Keychain, Manager.ProcessManager, TaskManager, Environment.NodeJsExecutablePath, Environment.OctorunScriptPath); - } - - return client; - } - } - public override void OnEnable() { base.OnEnable(); ownersNeedLoading = publishOwners == null && !isBusy; + connectionsNeedLoading = connections == null && !isBusy; } public override void OnDataUpdate() @@ -73,6 +55,17 @@ public override void OnDataUpdate() private void MaybeUpdateData() { + if (connectionsNeedLoading) + { + connectionsNeedLoading = false; + connections = Platform.Keychain.Connections.OrderByDescending(HostAddress.IsGitHubDotCom).ToArray(); + connectionLabels = connections.Select(c => HostAddress.IsGitHubDotCom(c) ? 
"GitHub" : c.Host.ToUriString().Host).ToArray(); + + var connection = connections.First(); + selectedConnection = 0; + selectedClient = GetApiClient(connection); + } + if (ownersNeedLoading) { ownersNeedLoading = false; @@ -80,6 +73,20 @@ private void MaybeUpdateData() } } + private IApiClient GetApiClient(Connection connection) + { + IApiClient client; + + if (!clients.TryGetValue(connection.Host, out client)) + { + client = new ApiClient(Platform.Keychain, Platform.ProcessManager, TaskManager, Environment, connection.Host); + + clients.Add(connection.Host, client); + } + + return client; + } + public override void InitializeView(IView parent) { base.InitializeView(parent); @@ -89,22 +96,16 @@ public override void InitializeView(IView parent) private void LoadOwners() { - var keychainConnections = Platform.Keychain.Connections; - //TODO: ONE_USER_LOGIN This assumes only ever one user can login - isBusy = true; - //TODO: ONE_USER_LOGIN This assumes only ever one user can login - username = keychainConnections.First().Username; - - Client.GetOrganizations(orgs => + selectedClient.GetOrganizations(orgs => { publishOwners = orgs .OrderBy(organization => organization.Login) .Select(organization => organization.Login) .ToArray(); - owners = new[] { OwnersDefaultText, username }.Union(publishOwners).ToArray(); + owners = new[] { OwnersDefaultText, connections[selectedConnection].Username }.Union(publishOwners).ToArray(); isBusy = false; @@ -129,12 +130,26 @@ public override void OnGUI() { GUILayout.BeginHorizontal(Styles.AuthHeaderBoxStyle); { - GUILayout.Label(PublishToGithubLabel, EditorStyles.boldLabel); + GUILayout.Label(PublishToGithubLabel, EditorStyles.boldLabel); } GUILayout.EndHorizontal(); EditorGUI.BeginDisabledGroup(isBusy); { + if (connections.Length > 1) + { + EditorGUI.BeginChangeCheck(); + { + selectedConnection = EditorGUILayout.Popup("Connections:", selectedConnection, connectionLabels); + } + if (EditorGUI.EndChangeCheck()) + { + selectedClient = 
GetApiClient(connections[selectedConnection]); + ownersNeedLoading = true; + Redraw(); + } + } + selectedOwner = EditorGUILayout.Popup(SelectedOwnerLabel, selectedOwner, owners); repoName = EditorGUILayout.TextField(RepositoryNameLabel, repoName); repoDescription = EditorGUILayout.TextField(DescriptionLabel, repoDescription); @@ -153,12 +168,12 @@ public override void OnGUI() GUI.FocusControl(null); isBusy = true; - var organization = owners[selectedOwner] == username ? null : owners[selectedOwner]; + var organization = owners[selectedOwner] == connections[selectedConnection].Username ? null : owners[selectedOwner]; var cleanRepoDescription = repoDescription.Trim(); cleanRepoDescription = string.IsNullOrEmpty(cleanRepoDescription) ? null : cleanRepoDescription; - Client.CreateRepository(repoName, cleanRepoDescription, togglePrivate, (repository, ex) => + selectedClient.CreateRepository(repoName, cleanRepoDescription, togglePrivate, (repository, ex) => { if (ex != null) { @@ -202,7 +217,7 @@ private string GetPublishErrorMessage(Exception ex) { return PublishLimitPrivateRepositoriesError; } - + return ex.Message; } diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Subview.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Subview.cs index c3c3f1514..4fbfb81e8 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Subview.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Subview.cs @@ -92,6 +92,7 @@ public virtual void DoneRefreshing() protected IEnvironment Environment { get { return Manager.Environment; } } protected IPlatform Platform { get { return Manager.Platform; } } protected IUsageTracker UsageTracker { get { return Manager.UsageTracker; } } + protected IOAuthCallbackManager OAuthCallbackManager { get { return Manager.OAuthCallbackManager; } } public bool HasFocus { get { return Parent != null && Parent.HasFocus; } } public virtual bool IsBusy diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/TreeControl.cs 
b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/TreeControl.cs index 206f7edbb..3dc82e14c 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/TreeControl.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/TreeControl.cs @@ -660,14 +660,13 @@ protected Texture GetNodeIcon(TreeNode node) return nodeIcon; } - protected override TreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, GitBranchTreeData? treeData, bool isContainer) + protected override TreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, GitBranchTreeData? treeData) { var node = new TreeNode { Path = path, Label = label, Level = level, IsFolder = isFolder, - IsContainer = isContainer, IsActive = isActive, IsHidden = isHidden, IsCollapsed = isCollapsed, diff --git a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Window.cs b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Window.cs index 53a757910..bfe6b5d70 100644 --- a/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Window.cs +++ b/src/UnityExtension/Assets/Editor/GitHub.Unity/UI/Window.cs @@ -16,6 +16,7 @@ class Window : BaseWindow [NonSerialized] private Spinner spinner; [NonSerialized] private IProgress repositoryProgress; [NonSerialized] private IProgress appManagerProgress; + [NonSerialized] private bool firstOnGUI = true; [SerializeField] private double progressMessageClearTime = -1; [SerializeField] private double notificationClearTime = -1; @@ -40,6 +41,7 @@ class Window : BaseWindow [SerializeField] private int statusAhead; [SerializeField] private int statusBehind; [SerializeField] private bool hasItemsToCommit; + [SerializeField] private bool isTrackingRemoteBranch; [SerializeField] private GUIContent currentBranchContent; [SerializeField] private GUIContent currentRemoteUrlContent; [SerializeField] private CacheUpdateEvent 
lastCurrentBranchAndRemoteChangedEvent; @@ -54,7 +56,8 @@ class Window : BaseWindow [SerializeField] private string repositoryProgressMessage; [SerializeField] private float appManagerProgressValue; [SerializeField] private string appManagerProgressMessage; - [SerializeField] private Connection connection; + [SerializeField] private Connection[] connections; + [SerializeField] private string primaryConnectionUsername; [MenuItem(Menu_Window_GitHub)] public static void Window_GitHub() @@ -111,8 +114,6 @@ public override void Initialize(IApplicationManager applicationManager) LocksView.InitializeView(this); InitProjectView.InitializeView(this); - titleContent = new GUIContent(Title, Styles.SmallLogo); - if (!HasRepository) { changeTab = activeTab = SubTab.InitProject; @@ -212,7 +213,49 @@ private void ValidateCachedData(IRepository repository) private void MaybeUpdateData() { - connection = Platform.Keychain.Connections.FirstOrDefault(); + if (firstOnGUI) + { + titleContent = new GUIContent(Title, Styles.SmallLogo); + } + firstOnGUI = false; + + if (HasRepository && !string.IsNullOrEmpty(Repository.CloneUrl)) + { + var host = Repository.CloneUrl + .ToRepositoryUri() + .GetComponents(UriComponents.Host, UriFormat.SafeUnescaped); + + connections = Platform.Keychain.Connections.OrderByDescending(x => x.Host == host).ToArray(); + } + else + { + connections = Platform.Keychain.Connections.OrderByDescending(HostAddress.IsGitHubDotCom).ToArray(); + } + + var connectionCount = connections.Length; + if (connectionCount > 1) + { + var connection = connections.First(); + var isGitHubDotCom = HostAddress.IsGitHubDotCom(connection); + + if (isGitHubDotCom) + { + primaryConnectionUsername = "GitHub: " + connection.Username; + } + else + { + primaryConnectionUsername = connection.Host + ": " + connection.Username; + } + } + else if(connectionCount == 1) + { + primaryConnectionUsername = connections.First().Username; + } + else + { + primaryConnectionUsername = null; + } + if 
(repositoryProgressHasUpdate) { @@ -279,7 +322,17 @@ private void MaybeUpdateData() currentBranchAndRemoteHasUpdate = false; var repositoryCurrentBranch = Repository.CurrentBranch; - var updatedRepoBranch = repositoryCurrentBranch.HasValue ? repositoryCurrentBranch.Value.Name : null; + string updatedRepoBranch; + if (repositoryCurrentBranch.HasValue) + { + updatedRepoBranch = repositoryCurrentBranch.Value.Name; + isTrackingRemoteBranch = !string.IsNullOrEmpty(repositoryCurrentBranch.Value.Tracking); + } + else + { + updatedRepoBranch = null; + isTrackingRemoteBranch = false; + } var repositoryCurrentRemote = Repository.CurrentRemote; if (repositoryCurrentRemote.HasValue) @@ -294,7 +347,7 @@ private void MaybeUpdateData() if (currentRemoteName != updatedRepoRemote) { - currentRemoteName = updatedRepoBranch; + currentRemoteName = updatedRepoRemote; shouldUpdateContentFields = true; } @@ -313,6 +366,8 @@ private void MaybeUpdateData() } else { + isTrackingRemoteBranch = false; + if (currentRemoteName != null) { currentRemoteName = null; @@ -555,6 +610,24 @@ private void DoToolbarGUI() } GUILayout.FlexibleSpace(); + + if (!HasRepository) + { + GUILayout.FlexibleSpace(); + + if (!connections.Any()) + { + if (GUILayout.Button("Sign in", EditorStyles.toolbarButton)) + SignIn(null); + } + else + { + if (GUILayout.Button(primaryConnectionUsername, EditorStyles.toolbarDropDown)) + { + DoAccountDropdown(); + } + } + } } EditorGUILayout.EndHorizontal(); } @@ -591,7 +664,7 @@ private void DoActionbarGUI() EditorGUI.EndDisabledGroup(); // Push button - EditorGUI.BeginDisabledGroup(currentRemoteName == null || statusAhead == 0); + EditorGUI.BeginDisabledGroup(currentRemoteName == null || isTrackingRemoteBranch && statusAhead == 0); { var pushButtonText = statusAhead > 0 ? 
new GUIContent(String.Format(Localization.PushButtonCount, statusAhead)) : pushButtonContent; var pushClicked = GUILayout.Button(pushButtonText, Styles.ToolbarButtonStyle); @@ -624,13 +697,14 @@ private void DoActionbarGUI() GUILayout.FlexibleSpace(); - if (connection == null) + if (!connections.Any()) { if (GUILayout.Button("Sign in", EditorStyles.toolbarButton)) SignIn(null); } else { + var connection = connections.First(); if (GUILayout.Button(connection.Username, EditorStyles.toolbarDropDown)) { DoAccountDropdown(); @@ -711,6 +785,8 @@ private void Pull() EditorUtility.DisplayDialog(Localization.PullActionTitle, String.Format(Localization.PullSuccessDescription, currentRemoteName), Localization.Ok); + + AssetDatabase.Refresh(); } else { @@ -800,9 +876,38 @@ private void SwitchView(Subview fromView, Subview toView) private void DoAccountDropdown() { GenericMenu accountMenu = new GenericMenu(); - accountMenu.AddItem(new GUIContent("Go to Profile"), false, GoToProfile, "profile"); - accountMenu.AddSeparator(""); - accountMenu.AddItem(new GUIContent("Sign out"), false, SignOut, "sign out"); + + if (connections.Length == 1) + { + var connection = connections.First(); + accountMenu.AddItem(new GUIContent("Go to Profile"), false, GoToProfile, connection); + accountMenu.AddItem(new GUIContent("Sign out"), false, SignOut, connection); + accountMenu.AddSeparator(""); + accountMenu.AddItem(new GUIContent("Sign In"), false, SignIn, "sign in"); + } + else + { + for (var index = 0; index < connections.Length; index++) + { + var connection = connections[index]; + var isGitHubDotCom = HostAddress.IsGitHubDotCom(connection); + + string rootPath; + if (isGitHubDotCom) + { + rootPath = "GitHub/"; + } + else + { + var uriString = connection.Host.ToUriString(); + rootPath = uriString.Host + "/"; + } + + accountMenu.AddItem(new GUIContent(rootPath + "Go to Profile"), false, GoToProfile, connection); + accountMenu.AddItem(new GUIContent(rootPath + "Sign out"), false, SignOut, 
connection); + } + } + accountMenu.ShowAsContext(); } @@ -813,27 +918,16 @@ private void SignIn(object obj) private void GoToProfile(object obj) { - //TODO: ONE_USER_LOGIN This assumes only ever one user can login - var keychainConnection = Platform.Keychain.Connections.First(); - var uriString = new UriString(keychainConnection.Host).Combine(keychainConnection.Username); + var connection = (Connection) obj; + var uriString = new UriString(connection.Host).Combine(connection.Username); Application.OpenURL(uriString); } private void SignOut(object obj) { - UriString host; - if (Repository != null && Repository.CloneUrl != null && Repository.CloneUrl.IsValidUri) - { - host = new UriString(Repository.CloneUrl.ToRepositoryUri() - .GetComponents(UriComponents.SchemeAndServer, UriFormat.SafeUnescaped)); - } - else - { - host = UriString.ToUriString(HostAddress.GitHubDotComHostAddress.WebUri); - } - - var apiClient = new ApiClient(host, Platform.Keychain, Manager.ProcessManager, Manager.TaskManager, Environment.NodeJsExecutablePath, Environment.OctorunScriptPath); - apiClient.Logout(host).FinallyInUI((s, e) => Redraw()); + var connection = (Connection)obj; + var loginManager = new LoginManager(Platform.Keychain, Manager.ProcessManager, Manager.TaskManager, Environment); + loginManager.Logout(connection.Host).FinallyInUI((s, e) => Redraw()); } public new void ShowNotification(GUIContent content) diff --git a/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.asmdef b/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.asmdef index eef024216..d49630a1a 100644 --- a/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.asmdef +++ b/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.asmdef @@ -3,8 +3,16 @@ "references": [ "GitHub.Unity" ], + "optionalUnityReferences": [ + "TestAssemblies" + ], "includePlatforms": [ "Editor" ], - "excludePlatforms": [] + "excludePlatforms": [], + "allowUnsafeCode": false, + "overrideReferences": false, + "precompiledReferences": 
[], + "autoReferenced": true, + "defineConstraints": [] } \ No newline at end of file diff --git a/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.v3.ncrunchproject b/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.v3.ncrunchproject new file mode 100644 index 000000000..319cd523c --- /dev/null +++ b/src/UnityExtension/Assets/Editor/UnityTests/UnityTests.v3.ncrunchproject @@ -0,0 +1,5 @@ + + + True + + \ No newline at end of file diff --git a/src/UnityExtension/ProjectSettings/ProjectVersion.txt b/src/UnityExtension/ProjectSettings/ProjectVersion.txt new file mode 100644 index 000000000..78fa54032 --- /dev/null +++ b/src/UnityExtension/ProjectSettings/ProjectVersion.txt @@ -0,0 +1 @@ +m_EditorVersion: 2018.3.2f1 diff --git a/src/UnityShim/UnityShim.cs b/src/UnityShim/UnityShim.cs new file mode 100644 index 000000000..3cee6665a --- /dev/null +++ b/src/UnityShim/UnityShim.cs @@ -0,0 +1,14 @@ +using System; +using UnityEditor; +namespace GitHub.Unity +{ + public static class UnityShim + { + public static event Action Editor_finishedDefaultHeaderGUI; + public static void Raise_Editor_finishedDefaultHeaderGUI(Editor editor) + { + if (Editor_finishedDefaultHeaderGUI != null) + Editor_finishedDefaultHeaderGUI(editor); + } + } +} \ No newline at end of file diff --git a/src/UnityShim/UnityShim.csproj b/src/UnityShim/UnityShim.csproj new file mode 100644 index 000000000..6f10a7734 --- /dev/null +++ b/src/UnityShim/UnityShim.csproj @@ -0,0 +1,74 @@ + + + + + Debug + AnyCPU + {F94F8AE1-C171-4A83-89E8-6557CA91A188} + Library + Properties + GitHub.Unity + GitHub.UnityShim + v3.5 + 512 + + 6 + + + ..\UnityExtension\Assets\Editor\build\ + + + + true + full + false + DEBUG;TRACE;$(BuildDefs) + prompt + 4 + false + false + true + + + pdbonly + true + TRACE;$(BuildDefs) + prompt + 4 + Release + false + false + true + + + true + full + false + TRACE;DEBUG;DEVELOPER_BUILD;$(BuildDefs) + prompt + 4 + false + false + true + ..\..\common\codeanalysis-debug.ruleset + + + 
Debug + + + + + $(UnityDir)Managed\UnityEditor.dll + False + + + $(UnityDir)Managed\UnityEngine.dll + False + + + + + + + + \ No newline at end of file diff --git a/src/packaging/CopyLibrariesToDevelopmentFolder/CopyLibrariesToDevelopmentFolder.csproj b/src/packaging/CopyLibrariesToDevelopmentFolder/CopyLibrariesToDevelopmentFolder.csproj index c02403ad2..40067b0aa 100644 --- a/src/packaging/CopyLibrariesToDevelopmentFolder/CopyLibrariesToDevelopmentFolder.csproj +++ b/src/packaging/CopyLibrariesToDevelopmentFolder/CopyLibrariesToDevelopmentFolder.csproj @@ -9,9 +9,10 @@ Properties deleteme deleteme - v3.5 + v4.5 512 $(SolutionDir)src\UnityExtension\Assets\Editor\build + true @@ -20,6 +21,7 @@ DEBUG;TRACE prompt 4 + false pdbonly @@ -27,12 +29,20 @@ TRACE prompt 4 + false + + + {b389adaf-62cc-486e-85b4-2d8b078df763} GitHub.Api + + {b389adaf-62cc-486e-85b4-2d8b078df76B} + GitHub.Api.45 + {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} GitHub.Logging @@ -41,7 +51,6 @@ $(SolutionDir)\lib\sfw\sfw.net.dll True - diff --git a/src/packaging/CopyLibrariesToPackageProject/CopyLibrariesToPackageProject.csproj b/src/packaging/CopyLibrariesToPackageProject/CopyLibrariesToPackageProject.csproj index 179d6dbe3..4f839eeae 100644 --- a/src/packaging/CopyLibrariesToPackageProject/CopyLibrariesToPackageProject.csproj +++ b/src/packaging/CopyLibrariesToPackageProject/CopyLibrariesToPackageProject.csproj @@ -9,9 +9,10 @@ Properties deleteme deleteme - v3.5 + v4.5 512 build + Release @@ -20,6 +21,7 @@ TRACE prompt 4 + false Debug @@ -28,44 +30,66 @@ TRACE prompt 4 + false {b389adaf-62cc-486e-85b4-2d8b078df763} GitHub.Api + + {b389adaf-62cc-486e-85b4-2d8b078df76b} + GitHub.Api.45 + {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} GitHub.Logging + + {add7a18b-dd2a-4c22-a2c1-488964eff30b} + GitHub.Unity.45 + {add7a18b-dd2a-4c22-a2c1-488964eff30a} GitHub.Unity True - - $(SolutionDir)\packages\AsyncBridge.Net35.0.2.3333.0\lib\net35-Client\AsyncBridge.Net35.dll - True - - - 
$(SolutionDir)\packages\ReadOnlyCollectionInterfaces.1.0.0\lib\NET20\ReadOnlyCollectionsInterfaces.dll + + {add7a18b-dd2a-4c22-a2c1-488964eff30b} + GitHub.Unity True - + $(SolutionDir)\lib\sfw\sfw.net.dll True - - $(SolutionDir)\packages\TaskParallelLibrary.1.0.3333.0\lib\Net35\System.Threading.dll - True - + + ExtensionLoader.cs + PreserveNewest + + + UnityAPIWrapper.cs + PreserveNewest + + + ReadOnlyCollectionsInterfaces.dll + PreserveNewest + + + AsyncBridge.Net35.dll + PreserveNewest + + + System.Threading.dll + PreserveNewest + diff --git a/src/tests/CommandLine/CommandLine.csproj b/src/tests/CommandLine/CommandLine.csproj index a5e3b5a0b..2b00d6b80 100644 --- a/src/tests/CommandLine/CommandLine.csproj +++ b/src/tests/CommandLine/CommandLine.csproj @@ -55,10 +55,6 @@ {bb6a8eda-15d8-471b-a6ed-ee551e0b3ba0} GitHub.Logging - - {add7a18b-dd2a-4c22-a2c1-488964eff30a} - GitHub.Unity - {3dd3451c-30fa-4294-a3a9-1e080342f867} TestWebServer diff --git a/src/tests/IntegrationTests/BaseIntegrationTest.cs b/src/tests/IntegrationTests/BaseIntegrationTest.cs index 3041951e2..37b9a6c41 100644 --- a/src/tests/IntegrationTests/BaseIntegrationTest.cs +++ b/src/tests/IntegrationTests/BaseIntegrationTest.cs @@ -62,11 +62,14 @@ protected void InitializeEnvironment(NPath repoPath, cacheContainer.SetCacheInitializer(CacheType.GitUser, () => GitUserCache.Instance); cacheContainer.SetCacheInitializer(CacheType.RepositoryInfo, () => RepositoryInfoCache.Instance); - Environment = new IntegrationTestEnvironment(cacheContainer, - repoPath, - SolutionDirectory, - enableTrace: enableEnvironmentTrace, - initializeRepository: initializeRepository); + var environment = new IntegrationTestEnvironment(cacheContainer, + repoPath, + SolutionDirectory, + enableTrace: enableEnvironmentTrace, + initializeRepository: initializeRepository); + environment.NodeJsExecutablePath = TestApp; + environment.OctorunScriptPath = TestApp; + Environment = environment; } protected void InitializePlatform(NPath 
repoPath, diff --git a/src/tests/IntegrationTests/CachingClasses.cs b/src/tests/IntegrationTests/CachingClasses.cs index 944c584e1..3c25d210c 100644 --- a/src/tests/IntegrationTests/CachingClasses.cs +++ b/src/tests/IntegrationTests/CachingClasses.cs @@ -109,6 +109,7 @@ abstract class ManagedCacheBase : ScriptObjectSingleton where T : class, I private DateTimeOffset? initializedAtValue; private bool isInvalidating; + protected bool forcedInvalidation; public event Action CacheInvalidated; public event Action CacheUpdated; @@ -134,13 +135,20 @@ public bool ValidateData() public void InvalidateData() { - if (!isInvalidating) - { - Logger.Trace("Invalidate"); - isInvalidating = true; - LastUpdatedAt = DateTimeOffset.MinValue; - CacheInvalidated.SafeInvoke(CacheType); - } + forcedInvalidation = true; + Invalidate(); + } + + private void Invalidate() + { + isInvalidating = true; + LastUpdatedAt = DateTimeOffset.MinValue; + CacheInvalidated.SafeInvoke(CacheType); + } + + public void ResetInvalidation() + { + isInvalidating = false; } protected void SaveData(DateTimeOffset now, bool isChanged) diff --git a/src/tests/IntegrationTests/IOTestsRepo.zip b/src/tests/IntegrationTests/IOTestsRepo.zip index 1b7560cd6..d44348f7b 100644 --- a/src/tests/IntegrationTests/IOTestsRepo.zip +++ b/src/tests/IntegrationTests/IOTestsRepo.zip @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0e8614b3fb2545993c4539dd3967df29072386c523d4707ec579a91a6a4efaf9 -size 296148 +oid sha256:b32bc4a54fa11844beacd90ff0d4cbf673b0fd720134197aab9f1f772c980c7b +size 296183 diff --git a/src/tests/IntegrationTests/IntegrationTestEnvironment.cs b/src/tests/IntegrationTests/IntegrationTestEnvironment.cs index f85a7b227..4690ac6df 100644 --- a/src/tests/IntegrationTests/IntegrationTestEnvironment.cs +++ b/src/tests/IntegrationTests/IntegrationTestEnvironment.cs @@ -1,5 +1,6 @@ using System; using System.Globalization; +using System.Linq; using GitHub.Unity; using GitHub.Logging; @@ -60,7 +61,7 
@@ public void InitializeRepository(NPath? expectedPath = null) public string ExpandEnvironmentVariables(string name) { - return name; + return defaultEnvironment.ExpandEnvironmentVariables(name); } public string GetEnvironmentVariable(string v) @@ -73,6 +74,18 @@ public string GetEnvironmentVariable(string v) return environmentVariable; } + + public string GetEnvironmentVariableKey(string name) + { + return defaultEnvironment.GetEnvironmentVariableKey(name); + } + + private static string GetEnvironmentVariableKeyInternal(string name) + { + return Environment.GetEnvironmentVariables().Keys.Cast() + .FirstOrDefault(k => string.Compare(name, k, true, CultureInfo.InvariantCulture) == 0) ?? name; + } + public string GetSpecialFolder(Environment.SpecialFolder folder) { var ensureDirectoryExists = UserCachePath.Parent.EnsureDirectoryExists(folder.ToString()); @@ -88,7 +101,7 @@ public string GetSpecialFolder(Environment.SpecialFolder folder) public string UserProfilePath => UserCachePath.Parent.CreateDirectory("user profile path"); - public string Path { get; set; } = Environment.GetEnvironmentVariable("PATH").ToNPath(); + public string Path { get; set; } = Environment.GetEnvironmentVariable(GetEnvironmentVariableKeyInternal("PATH")); public string NewLine => Environment.NewLine; public string UnityVersion => "5.6"; @@ -100,7 +113,7 @@ public string GetSpecialFolder(Environment.SpecialFolder folder) public NPath GitLfsExecutablePath => defaultEnvironment.GitLfsExecutablePath; public GitInstaller.GitInstallationState GitInstallationState { get { return defaultEnvironment.GitInstallationState; } set { defaultEnvironment.GitInstallationState = value; } } - public NPath NodeJsExecutablePath => defaultEnvironment.NodeJsExecutablePath; + public NPath NodeJsExecutablePath { get; set; } public NPath OctorunScriptPath { get; set; } @@ -133,5 +146,6 @@ public string GetSpecialFolder(Environment.SpecialFolder folder) public ISettings LocalSettings => 
defaultEnvironment.LocalSettings; public ISettings SystemSettings => defaultEnvironment.SystemSettings; public ISettings UserSettings => defaultEnvironment.UserSettings; + public IOAuthCallbackManager OAuthCallbackListener { get; } } } diff --git a/src/tests/IntegrationTests/IntegrationTests.csproj b/src/tests/IntegrationTests/IntegrationTests.csproj index 351a6d571..91f33f22c 100644 --- a/src/tests/IntegrationTests/IntegrationTests.csproj +++ b/src/tests/IntegrationTests/IntegrationTests.csproj @@ -40,10 +40,6 @@ $(SolutionDir)packages\FluentAssertions.2.2.0.0\lib\net35\FluentAssertions.dll True - - False - $(SolutionDir)lib\ICSharpCode.SharpZipLib.dll - $(SolutionDir)packages\NCrunch.Framework.3.3.0.6\lib\NCrunch.Framework.dll True @@ -77,8 +73,7 @@ - - + diff --git a/src/tests/IntegrationTests/IntegrationTests.v3.ncrunchproject b/src/tests/IntegrationTests/IntegrationTests.v3.ncrunchproject index 85e05b85b..dd346e243 100644 --- a/src/tests/IntegrationTests/IntegrationTests.v3.ncrunchproject +++ b/src/tests/IntegrationTests/IntegrationTests.v3.ncrunchproject @@ -1,10 +1,10 @@  - ..\..\..\lib\sfw.net\win\x64\Debug\**.* ..\..\GitHub.Api\PlatformResources\**.* - ..\..\..\script\lib\UnityEditor.dll - ..\..\..\script\lib\UnityEngine.dll + ..\..\..\script\lib\Managed\UnityEditor.dll + ..\..\..\script\lib\Managed\UnityEngine.dll + ..\..\..\lib\sfw\win\x64\**.* AbnormalReferenceResolution @@ -16,6 +16,15 @@ IntegrationTests.GitSetupTests.VerifyGitLfsBundle + + IntegrationTests.GitInstallerTestsWithHttp + + + IntegrationTests.GitInstallerTests + + + IntegrationTests.A_GitClientTests + True True diff --git a/src/tests/IntegrationTests/Metrics/MetricsTests.cs b/src/tests/IntegrationTests/Metrics/MetricsTests.cs index 3d7fa1bdb..34aaa42ad 100644 --- a/src/tests/IntegrationTests/Metrics/MetricsTests.cs +++ b/src/tests/IntegrationTests/Metrics/MetricsTests.cs @@ -110,7 +110,7 @@ public void SubmissionWorks() storePath.WriteAllText(savedStore.ToJson(lowerCase: true)); 
settings.Get(Arg.Is(Constants.MetricsKey), Arg.Any()).Returns(true); - var metricsService = new MetricsService(ProcessManager, TaskManager, Environment.FileSystem, TestApp, TestApp); + var metricsService = new MetricsService(ProcessManager, TaskManager, Platform.Keychain, Environment); usageTracker.MetricsService = metricsService; var method = usageTracker.GetType().GetMethod("SendUsage", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance); method.Invoke(usageTracker, null); diff --git a/src/tests/IntegrationTests/Process/ProcessManagerIntegrationTests.cs b/src/tests/IntegrationTests/Process/ProcessManagerIntegrationTests.cs index bdd418605..f817a1ad7 100644 --- a/src/tests/IntegrationTests/Process/ProcessManagerIntegrationTests.cs +++ b/src/tests/IntegrationTests/Process/ProcessManagerIntegrationTests.cs @@ -13,6 +13,7 @@ namespace IntegrationTests class ProcessManagerIntegrationTests : BaseGitEnvironmentTest { [Test] + [Category("DoNotRunOnAppVeyor")] public async Task BranchListTest() { InitializePlatformAndEnvironment(TestRepoMasterCleanUnsynchronized); @@ -23,7 +24,7 @@ public async Task BranchListTest() .StartAsAsync(); gitBranches.Should().BeEquivalentTo( - new GitBranch("master", "origin/master: behind 1"), + new GitBranch("master", "origin/master"), new GitBranch("feature/document", "origin/feature/document")); } @@ -34,7 +35,7 @@ public async Task LogEntriesTest() List logEntries = null; logEntries = await ProcessManager - .GetGitLogEntries(TestRepoMasterCleanUnsynchronized, Environment, GitEnvironment, 2) + .GetGitLogEntries(TestRepoMasterCleanUnsynchronized, Environment, 2) .StartAsAsync(); var firstCommitTime = new DateTimeOffset(2017, 1, 27, 17, 19, 32, TimeSpan.FromHours(-5)); @@ -45,25 +46,27 @@ public async Task LogEntriesTest() new GitLogEntry("018997938335742f8be694240a7c2b352ec0835f", "Author Person", "author@example.com", "Author Person", "author@example.com", - "Moving project files where they should be kept", - 
"Moving project files where they should be kept", firstCommitTime, + "Moving project files where they should be kept", + "", + firstCommitTime, firstCommitTime, new List { new GitStatusEntry("Assets/TestDocument.txt".ToNPath(), TestRepoMasterCleanUnsynchronized + "/Assets/TestDocument.txt".ToNPath(), "Assets/TestDocument.txt".ToNPath(), - GitFileStatus.Renamed, "TestDocument.txt") + GitFileStatus.Renamed, GitFileStatus.None, "TestDocument.txt") }), new GitLogEntry("03939ffb3eb8486dba0259b43db00842bbe6eca1", "Author Person", "author@example.com", "Author Person", "author@example.com", "Initial Commit", - "Initial Commit", secondCommitTime, + "", + secondCommitTime, secondCommitTime, new List { new GitStatusEntry("TestDocument.txt".ToNPath(), TestRepoMasterCleanUnsynchronized + "/TestDocument.txt".ToNPath(), "TestDocument.txt".ToNPath(), - GitFileStatus.Added), + GitFileStatus.Added, GitFileStatus.None), }), }); } @@ -75,7 +78,7 @@ public async Task RussianLogEntriesTest() List logEntries = null; logEntries = await ProcessManager - .GetGitLogEntries(TestRepoMasterCleanUnsynchronizedRussianLanguage, Environment, GitEnvironment, 1) + .GetGitLogEntries(TestRepoMasterCleanUnsynchronizedRussianLanguage, Environment, 1) .StartAsAsync(); var commitTime = new DateTimeOffset(2017, 4, 20, 11, 47, 18, TimeSpan.FromHours(-4)); @@ -86,12 +89,13 @@ public async Task RussianLogEntriesTest() "Author Person", "author@example.com", "Author Person", "author@example.com", "Я люблю github", - "Я люблю github", commitTime, + "", + commitTime, commitTime, new List { new GitStatusEntry(@"Assets\A new file.txt".ToNPath(), TestRepoMasterCleanUnsynchronizedRussianLanguage + "/Assets/A new file.txt".ToNPath(), "Assets/A new file.txt".ToNPath(), - GitFileStatus.Added), + GitFileStatus.Added, GitFileStatus.None), }), }); } @@ -116,7 +120,7 @@ public async Task StatusTest() GitStatus? 
gitStatus = null; gitStatus = await ProcessManager - .GetGitStatus(TestRepoMasterDirtyUnsynchronized, Environment, GitEnvironment) + .GetGitStatus(TestRepoMasterDirtyUnsynchronized, Environment) .StartAsAsync(); gitStatus.Value.AssertEqual(new GitStatus() @@ -129,17 +133,17 @@ public async Task StatusTest() new GitStatusEntry("Assets/Added Document.txt".ToNPath(), TestRepoMasterDirtyUnsynchronized.Combine("Assets/Added Document.txt"), "Assets/Added Document.txt".ToNPath(), - GitFileStatus.Added, staged: true), + GitFileStatus.Added, GitFileStatus.None), new GitStatusEntry("Assets/Renamed TestDocument.txt".ToNPath(), TestRepoMasterDirtyUnsynchronized.Combine("Assets/Renamed TestDocument.txt"), "Assets/Renamed TestDocument.txt".ToNPath(), - GitFileStatus.Renamed, "Assets/TestDocument.txt".ToNPath(), true), + GitFileStatus.Renamed, GitFileStatus.None, "Assets/TestDocument.txt".ToNPath()), new GitStatusEntry("Assets/Untracked Document.txt".ToNPath(), TestRepoMasterDirtyUnsynchronized.Combine("Assets/Untracked Document.txt"), "Assets/Untracked Document.txt".ToNPath(), - GitFileStatus.Untracked), + GitFileStatus.Untracked, GitFileStatus.Untracked), } }); } @@ -150,7 +154,7 @@ public async Task CredentialHelperGetTest() InitializePlatformAndEnvironment(TestRepoMasterCleanSynchronized); await ProcessManager - .GetGitCreds(TestRepoMasterCleanSynchronized, Environment, GitEnvironment) + .GetGitCreds(TestRepoMasterCleanSynchronized) .StartAsAsync(); } } diff --git a/src/tests/IntegrationTests/ProcessManagerExtensions.cs b/src/tests/TestUtils/Helpers/ProcessManagerExtensions.cs similarity index 59% rename from src/tests/IntegrationTests/ProcessManagerExtensions.cs rename to src/tests/TestUtils/Helpers/ProcessManagerExtensions.cs index 622006c8b..aa09e5b55 100644 --- a/src/tests/IntegrationTests/ProcessManagerExtensions.cs +++ b/src/tests/TestUtils/Helpers/ProcessManagerExtensions.cs @@ -3,8 +3,9 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using 
GitHub.Unity.Git.Tasks; -namespace IntegrationTests +namespace TestUtils { static class ProcessManagerExtensions { @@ -15,47 +16,35 @@ public static ITask> GetGitBranches(this IProcessManager process NPath? gitPath = null) { var processor = new BranchListOutputProcessor(); - NPath path = gitPath ?? defaultGitPath; - return new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, "branch -vv", workingDirectory, false); + return new GitListLocalBranchesTask(CancellationToken.None, processor) + .Configure(processManager, gitPath ?? defaultGitPath, workingDirectory: workingDirectory); } public static ITask> GetGitLogEntries(this IProcessManager processManager, NPath workingDirectory, - IEnvironment environment, IProcessEnvironment gitEnvironment, - int? logCount = null, + IEnvironment environment, + int logCount = 0, NPath? gitPath = null) { var gitStatusEntryFactory = new GitObjectFactory(environment); var processor = new LogEntryOutputProcessor(gitStatusEntryFactory); - var logNameStatus = @"log --pretty=format:""%H%n%P%n%aN%n%aE%n%aI%n%cN%n%cE%n%cI%n%B---GHUBODYEND---"" --name-status"; - - if (logCount.HasValue) - { - logNameStatus = logNameStatus + " -" + logCount.Value; - } - - NPath path = gitPath ?? defaultGitPath; - - return new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, logNameStatus, workingDirectory, false); + return new GitLogTask(logCount, null, CancellationToken.None, processor) + .Configure(processManager, gitPath ?? defaultGitPath, workingDirectory: workingDirectory); } public static ITask GetGitStatus(this IProcessManager processManager, NPath workingDirectory, - IEnvironment environment, IProcessEnvironment gitEnvironment, + IEnvironment environment, NPath? gitPath = null) { var gitStatusEntryFactory = new GitObjectFactory(environment); var processor = new GitStatusOutputProcessor(gitStatusEntryFactory); - NPath path = gitPath ?? 
defaultGitPath; - - return new ProcessTask(CancellationToken.None, processor) - .Configure(processManager, path, "status -b -u --porcelain", workingDirectory, false); + return new GitStatusTask(null, CancellationToken.None, processor) + .Configure(processManager, workingDirectory: workingDirectory); } public static ITask> GetGitRemoteEntries(this IProcessManager processManager, @@ -64,15 +53,12 @@ public static ITask> GetGitRemoteEntries(this IProcessManager pr { var processor = new RemoteListOutputProcessor(); - NPath path = gitPath ?? defaultGitPath; - - return new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, "remote -v", workingDirectory, false); + return new GitRemoteListTask(CancellationToken.None, processor) + .Configure(processManager, gitPath ?? defaultGitPath, workingDirectory: workingDirectory); } public static ITask GetGitCreds(this IProcessManager processManager, NPath workingDirectory, - IEnvironment environment, IProcessEnvironment gitEnvironment, NPath? 
gitPath = null) { var processor = new FirstNonNullLineOutputProcessor(); diff --git a/src/tests/TestUtils/Substitutes/SubstituteFactory.cs b/src/tests/TestUtils/Substitutes/SubstituteFactory.cs index 29e1690cc..13e2b32cf 100644 --- a/src/tests/TestUtils/Substitutes/SubstituteFactory.cs +++ b/src/tests/TestUtils/Substitutes/SubstituteFactory.cs @@ -310,15 +310,14 @@ public IGitObjectFactory CreateGitObjectFactory(string gitRepoPath) { var gitObjectFactory = Substitute.For(); - gitObjectFactory.CreateGitStatusEntry(Args.String, Args.GitFileStatus, Args.String, Args.Bool) + gitObjectFactory.CreateGitStatusEntry(Args.String, Args.GitFileStatus, Args.GitFileStatus, Args.String) .Returns(info => { var path = (string)info[0]; - var status = (GitFileStatus)info[1]; - var originalPath = (string)info[2]; - var staged = (bool)info[3]; + var indexStatus = (GitFileStatus)info[1]; + var workTreeStatus = (GitFileStatus)info[2]; + var originalPath = (string)info[3]; - return new GitStatusEntry(path, gitRepoPath + @"\" + path, null, status, originalPath, - staged); + return new GitStatusEntry(path, gitRepoPath + @"\" + path, null, indexStatus, workTreeStatus, originalPath); }); return gitObjectFactory; diff --git a/src/tests/TestUtils/TestUtils.csproj b/src/tests/TestUtils/TestUtils.csproj index 8ae36c41d..bb3d2857f 100644 --- a/src/tests/TestUtils/TestUtils.csproj +++ b/src/tests/TestUtils/TestUtils.csproj @@ -62,6 +62,7 @@ + diff --git a/src/tests/TestWebServer/HttpServer.cs b/src/tests/TestWebServer/HttpServer.cs index 8f4765abb..d2c824b85 100644 --- a/src/tests/TestWebServer/HttpServer.cs +++ b/src/tests/TestWebServer/HttpServer.cs @@ -8,6 +8,7 @@ using System.Net.Sockets; using System.Text; using System.Threading; +using Newtonsoft.Json; namespace TestWebServer { @@ -82,7 +83,7 @@ public void Start() var thread = new Thread(p => Process((HttpListenerContext)p)); thread.Start(context); } - catch (Exception ex) + catch (Exception) { break; } @@ -106,6 +107,18 @@ private void 
Process(HttpListenerContext context) if (context.Request.Url.AbsolutePath == "/api/usage/unity") { + var streamReader = new StreamReader(context.Request.InputStream); + string body = null; + using (streamReader) + { + body = streamReader.ReadToEnd(); + } + + var parsedJson = JsonConvert.DeserializeObject(body); + var formattedJson = JsonConvert.SerializeObject(parsedJson, Formatting.Indented); + + Logger.Info(formattedJson); + var json = new { result = "Cool unity usage" }.ToJson(); context.Response.StatusCode = (int)HttpStatusCode.OK; context.Response.ContentLength64 = json.Length; diff --git a/src/tests/TestWebServer/TestWebServer.csproj b/src/tests/TestWebServer/TestWebServer.csproj index c8d6cffad..8cc2ce5f1 100644 --- a/src/tests/TestWebServer/TestWebServer.csproj +++ b/src/tests/TestWebServer/TestWebServer.csproj @@ -31,6 +31,9 @@ 4 + + ..\..\..\packages\Newtonsoft.Json.11.0.2\lib\net35\Newtonsoft.Json.dll + @@ -81,6 +84,7 @@ PreserveNewest + diff --git a/src/tests/TestWebServer/TestWebServer.v3.ncrunchproject b/src/tests/TestWebServer/TestWebServer.v3.ncrunchproject new file mode 100644 index 000000000..fc42a761c --- /dev/null +++ b/src/tests/TestWebServer/TestWebServer.v3.ncrunchproject @@ -0,0 +1,8 @@ + + + + ..\..\..\script\lib\Managed\UnityEditor.dll + ..\..\..\script\lib\Managed\UnityEngine.dll + + + \ No newline at end of file diff --git a/src/tests/TestWebServer/packages.config b/src/tests/TestWebServer/packages.config new file mode 100644 index 000000000..9eecb30ef --- /dev/null +++ b/src/tests/TestWebServer/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/tests/UnitTests/Authentication/KeychainTests.cs b/src/tests/UnitTests/Authentication/KeychainTests.cs index 4c5a1e3a0..5e077ef1f 100644 --- a/src/tests/UnitTests/Authentication/KeychainTests.cs +++ b/src/tests/UnitTests/Authentication/KeychainTests.cs @@ -176,7 +176,7 @@ public void ShouldLoadFromConnectionManager() 
fileSystem.DidNotReceive().WriteAllLines(Args.String, Arg.Any()); var uriString = keychain.Hosts.FirstOrDefault(); - var keychainAdapter = keychain.Load(uriString); + var keychainAdapter = keychain.LoadFromSystem(uriString); keychainAdapter.Credential.Username.Should().Be(username); keychainAdapter.Credential.Token.Should().Be(token); keychainAdapter.Credential.Host.Should().Be(hostUri); @@ -222,7 +222,7 @@ public void ShouldDeleteFromCacheWhenLoadReturnsNullFromConnectionManager() fileSystem.ClearReceivedCalls(); var uriString = keychain.Hosts.FirstOrDefault(); - var keychainAdapter = keychain.Load(uriString); + var keychainAdapter = keychain.LoadFromSystem(uriString); keychainAdapter.Should().BeNull(); fileSystem.DidNotReceive().FileExists(Args.String); @@ -281,21 +281,21 @@ public void ShouldConnectSetCredentialsTokenAndSave() keychainAdapter.Credential.Should().BeNull(); - keychain.SetCredentials(new Credential(hostUri, username, password)); + keychainAdapter.Set(new Credential(hostUri, username, password)); keychainAdapter.Credential.Should().NotBeNull(); keychainAdapter.Credential.Host.Should().Be(hostUri); keychainAdapter.Credential.Username.Should().Be(username); keychainAdapter.Credential.Token.Should().Be(password); - keychain.SetToken(hostUri, token, username); + keychainAdapter.Update(token, username); keychainAdapter.Credential.Should().NotBeNull(); keychainAdapter.Credential.Host.Should().Be(hostUri); keychainAdapter.Credential.Username.Should().Be(username); keychainAdapter.Credential.Token.Should().Be(token); - keychain.Save(hostUri); + keychain.SaveToSystem(hostUri); fileSystem.DidNotReceive().FileExists(Args.String); fileSystem.DidNotReceive().FileDelete(Args.String); @@ -352,7 +352,7 @@ public void ShouldConnectSetCredentialsAndClear() keychainAdapter.Credential.Should().BeNull(); - keychain.SetCredentials(new Credential(hostUri, username, password)); + keychainAdapter.Set(new Credential(hostUri, username, password)); 
keychainAdapter.Credential.Should().NotBeNull(); keychainAdapter.Credential.Host.Should().Be(hostUri); diff --git a/src/tests/UnitTests/IO/BranchListOutputProcessorTests.cs b/src/tests/UnitTests/IO/BranchListOutputProcessorTests.cs index f22cc237a..6041652f2 100644 --- a/src/tests/UnitTests/IO/BranchListOutputProcessorTests.cs +++ b/src/tests/UnitTests/IO/BranchListOutputProcessorTests.cs @@ -15,14 +15,20 @@ public void ShouldProcessOutput() { "* master ef7ecf9 [origin/master] Some project master", " feature/feature-1 f47d41b Untracked Feature 1", - " bugfixes/bugfix-1 e1b7f22 [origin/bugfixes/bugfix-1] Tracked Local Bugfix" + " bugfixes/bugfix-1 e1b7f22 [origin/bugfixes/bugfix-1] Tracked Local Bugfix", + " bugfixes/bugfix-2 e1b7f22 [origin/bugfixes/bugfix-2: ahead 3] Ahead with some changes", + " bugfixes/bugfix-3 e1b7f22 [origin/bugfixes/bugfix-3: ahead 3, behind 116] Ahead and Behind", + " bugfixes/bugfix-4 e1b7f22 [origin/bugfixes/bugfix-4: gone] No longer on server", }; AssertProcessOutput(output, new[] { new GitBranch("master", "origin/master"), - new GitBranch("feature/feature-1", ""), + new GitBranch("feature/feature-1"), new GitBranch("bugfixes/bugfix-1", "origin/bugfixes/bugfix-1"), + new GitBranch("bugfixes/bugfix-2", "origin/bugfixes/bugfix-2"), + new GitBranch("bugfixes/bugfix-3", "origin/bugfixes/bugfix-3"), + new GitBranch("bugfixes/bugfix-4", "origin/bugfixes/bugfix-4"), }); } @@ -44,4 +50,4 @@ private void AssertProcessOutput(IEnumerable lines, GitBranch[] expected results.ShouldAllBeEquivalentTo(expected); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/GitLogEntryListTests.cs b/src/tests/UnitTests/IO/GitLogEntryListTests.cs index 769559435..49c2f7187 100644 --- a/src/tests/UnitTests/IO/GitLogEntryListTests.cs +++ b/src/tests/UnitTests/IO/GitLogEntryListTests.cs @@ -87,7 +87,7 @@ public void ListOf1ShouldEqualListOf1() commitTime, commitTime, new List { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", 
GitFileStatus.Added, "SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }, "MergeA", "MergeB") }; @@ -102,8 +102,7 @@ public void ListOf1ShouldEqualListOf1() commitTime, commitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - "SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }), "MergeA", "MergeB") }; @@ -126,7 +125,7 @@ public void ListOf2ShouldEqualListOf2() commitTime, commitTime, new List { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, "SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }, "MergeA", "MergeB"), new GitLogEntry("`CommitID", @@ -148,7 +147,7 @@ public void ListOf2ShouldEqualListOf2() "Description", commitTime, commitTime, new List { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, "SomeOriginalPath") + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath") }, "MergeA", "MergeB"), new GitLogEntry("`CommitID", @@ -181,8 +180,7 @@ public void ListOf2ShouldNotEqualListOf2InDifferentOrder() commitTime, commitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - "SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }), "MergeA", "MergeB"), new GitLogEntry("`CommitID", @@ -213,8 +211,7 @@ public void ListOf2ShouldNotEqualListOf2InDifferentOrder() otherCommitTime, otherCommitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - 
"SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }), "MergeA", "MergeB") }; @@ -222,4 +219,4 @@ public void ListOf2ShouldNotEqualListOf2InDifferentOrder() entries.AssertNotEqual(otherEntries); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/GitLogEntryTests.cs b/src/tests/UnitTests/IO/GitLogEntryTests.cs index 7d5d43b33..dd0e66940 100644 --- a/src/tests/UnitTests/IO/GitLogEntryTests.cs +++ b/src/tests/UnitTests/IO/GitLogEntryTests.cs @@ -89,7 +89,7 @@ public void ShouldEqualAnotherWhenChangesIsNotEmpty() commitTime, commitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, "SomeOriginalPath"), + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"), }), "MergeA", "MergeB"); @@ -101,8 +101,7 @@ public void ShouldEqualAnotherWhenChangesIsNotEmpty() commitTime, commitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - "SomeOriginalPath") + new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added,GitFileStatus.None, "SomeOriginalPath") }), "MergeA", "MergeB"); @@ -122,8 +121,7 @@ public void ShouldNotEqualAnotherWhenChangesAreDifferent() commitTime, commitTime, new List(new[] { - new GitStatusEntry("ASDFASDF", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - "SomeOriginalPath"), + new GitStatusEntry("ASDFASDF", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added,GitFileStatus.None, "SomeOriginalPath"), }), "MergeA", "MergeB"); @@ -135,12 +133,11 @@ public void ShouldNotEqualAnotherWhenChangesAreDifferent() commitTime, commitTime, new List(new[] { - new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", GitFileStatus.Added, - "SomeOriginalPath") + new GitStatusEntry("SomePath", "SomeFullPath", 
"SomeProjectPath", GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath") }), "MergeA", "MergeB"); gitLogEntry1.AssertNotEqual(gitLogEntry2); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/GitObjectFactoryTests.cs b/src/tests/UnitTests/IO/GitObjectFactoryTests.cs new file mode 100644 index 000000000..95cb8aa0d --- /dev/null +++ b/src/tests/UnitTests/IO/GitObjectFactoryTests.cs @@ -0,0 +1,50 @@ +using GitHub.Unity; +using NCrunch.Framework; +using NSubstitute; +using NUnit.Framework; +using TestUtils; + +namespace UnitTests +{ + [TestFixture, Isolated] + class GitObjectFactoryTests + { + private static readonly SubstituteFactory SubstituteFactory = new SubstituteFactory(); + + [Test] + public void ShouldParseNormalFile() + { + NPath.FileSystem = SubstituteFactory.CreateFileSystem(new CreateFileSystemOptions() { + CurrentDirectory = @"c:\Projects\UnityProject" + }); + + var environment = Substitute.For(); + environment.RepositoryPath.Returns(@"c:\Projects\UnityProject".ToNPath()); + environment.UnityProjectPath.Returns(@"c:\Projects\UnityProject".ToNPath()); + + var gitObjectFactory = new GitObjectFactory(environment); + var gitStatusEntry = gitObjectFactory.CreateGitStatusEntry("hello.txt", GitFileStatus.None, GitFileStatus.Deleted); + + Assert.AreEqual(@"c:\Projects\UnityProject\hello.txt", gitStatusEntry.FullPath); + } + + + [Test] + public void ShouldParseOddFile() + { + NPath.FileSystem = SubstituteFactory.CreateFileSystem(new CreateFileSystemOptions() + { + CurrentDirectory = @"c:\Projects\UnityProject" + }); + + var environment = Substitute.For(); + environment.RepositoryPath.Returns(@"c:\Projects\UnityProject".ToNPath()); + environment.UnityProjectPath.Returns(@"c:\Projects\UnityProject".ToNPath()); + + var gitObjectFactory = new GitObjectFactory(environment); + var gitStatusEntry = gitObjectFactory.CreateGitStatusEntry("c:UsersOculusGoVideo.mp4", GitFileStatus.None, GitFileStatus.Deleted); + + 
Assert.AreEqual(@"c:\Projects\UnityProject\c:UsersOculusGoVideo.mp4", gitStatusEntry.FullPath); + } + } +} diff --git a/src/tests/UnitTests/IO/GitStatusEntryFactoryTests.cs b/src/tests/UnitTests/IO/GitStatusEntryFactoryTests.cs index 78fe0482b..fd0a7e685 100644 --- a/src/tests/UnitTests/IO/GitStatusEntryFactoryTests.cs +++ b/src/tests/UnitTests/IO/GitStatusEntryFactoryTests.cs @@ -39,11 +39,11 @@ public void CreateObjectWhenProjectRootIsChildOfGitRootAndFileInGitRoot() var expectedFullPath = repositoryPath.Combine(inputPath); var expectedProjectPath = expectedFullPath.RelativeTo(unityProjectPath); - var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, inputStatus); + var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, GitFileStatus.None, inputStatus); var gitStatusEntryFactory = new GitObjectFactory(environment); - var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, inputStatus); + var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, GitFileStatus.None, inputStatus); result.Should().Be(expected); } @@ -69,11 +69,11 @@ public void CreateObjectWhenProjectRootIsChildOfGitRootAndFileInProjectRoot() var expectedFullPath = repositoryPath.Combine(inputPath); const string expectedProjectPath = "Something.sln"; - var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, inputStatus); + var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, GitFileStatus.None, inputStatus); var gitStatusEntryFactory = new GitObjectFactory(environment); - var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, inputStatus); + var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, GitFileStatus.None, inputStatus); result.Should().Be(expected); } @@ -99,13 +99,13 @@ public void CreateObjectWhenProjectRootIsSameAsGitRootAndFileInGitRoot() var expectedFullPath = repositoryPath.Combine(inputPath); const string expectedProjectPath 
= inputPath; - var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, inputStatus); + var expected = new GitStatusEntry(inputPath, expectedFullPath, expectedProjectPath, GitFileStatus.None, inputStatus); var gitStatusEntryFactory = new GitObjectFactory(environment); - var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, inputStatus); + var result = gitStatusEntryFactory.CreateGitStatusEntry(inputPath, GitFileStatus.None, inputStatus); result.Should().Be(expected); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/GitStatusEntryListTests.cs b/src/tests/UnitTests/IO/GitStatusEntryListTests.cs index bd2fbdf84..a38f7b503 100644 --- a/src/tests/UnitTests/IO/GitStatusEntryListTests.cs +++ b/src/tests/UnitTests/IO/GitStatusEntryListTests.cs @@ -13,19 +13,19 @@ public void ListOf2ShouldEqualListOf2() var gitStatusEntry1 = new[] { new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"), + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"), new GitStatusEntry("ASDFSomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Modified) + GitFileStatus.None, GitFileStatus.Modified) }; var gitStatusEntry2 = new[] { new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"), + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"), new GitStatusEntry("ASDFSomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Modified) + GitFileStatus.None, GitFileStatus.Modified) }; gitStatusEntry1.AssertEqual(gitStatusEntry2); @@ -37,22 +37,22 @@ public void ListOf2ShouldNotEqualListOf2InDifferentOrder() var gitStatusEntry1 = new[] { new GitStatusEntry("ASDFSomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Modified), + GitFileStatus.None, GitFileStatus.Modified), new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath") + GitFileStatus.None, 
GitFileStatus.Added, "SomeOriginalPath") }; var gitStatusEntry2 = new[] { new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"), + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"), new GitStatusEntry("ASDFSomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Modified) + GitFileStatus.None, GitFileStatus.Modified) }; gitStatusEntry1.AssertNotEqual(gitStatusEntry2); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/GitStatusEntryTests.cs b/src/tests/UnitTests/IO/GitStatusEntryTests.cs index c7fff8c4f..fa6566b11 100644 --- a/src/tests/UnitTests/IO/GitStatusEntryTests.cs +++ b/src/tests/UnitTests/IO/GitStatusEntryTests.cs @@ -12,22 +12,31 @@ public class GitStatusEntryTests public void ShouldNotBeEqualIfGitFileStatusIsDifferent() { var gitStatusEntry1 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"); var gitStatusEntry2 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Modified, "SomeOriginalPath"); + GitFileStatus.None, GitFileStatus.Modified, "SomeOriginalPath"); gitStatusEntry1.Should().NotBe(gitStatusEntry2); + + gitStatusEntry1 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Modified, GitFileStatus.Added, "SomeOriginalPath"); + + gitStatusEntry2 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Renamed, GitFileStatus.Modified, "SomeOriginalPath"); + + gitStatusEntry1.Should().NotBe(gitStatusEntry2); + } [Test] public void ShouldNotBeEqualIfPathIsDifferent() { var gitStatusEntry1 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"); var gitStatusEntry2 = new GitStatusEntry("SomePath2", "SomeFullPath", "SomeProjectPath", - 
GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"); gitStatusEntry1.Should().NotBe(gitStatusEntry2); } @@ -36,7 +45,7 @@ public void ShouldNotBeEqualIfPathIsDifferent() public void ShouldBeEqualIfOriginalpathIsNull() { var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added); + GitFileStatus.None, GitFileStatus.Added); gitStatusEntry.Should().Be(gitStatusEntry); } @@ -45,7 +54,7 @@ public void ShouldBeEqualIfOriginalpathIsNull() public void ShouldBeEqual() { var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); gitStatusEntry.Should().Be(gitStatusEntry); } @@ -54,10 +63,10 @@ public void ShouldBeEqual() public void ShouldBeEqualToOther() { var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); var gitStatusEntry2 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); gitStatusEntry.Should().Be(gitStatusEntry2); } @@ -66,10 +75,10 @@ public void ShouldBeEqualToOther() public void ShouldNotBeEqualIfOneIsStaged() { var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath", staged: true); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); var gitStatusEntry2 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath"); + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"); gitStatusEntry.Should().NotBe(gitStatusEntry2); } @@ -78,12 +87,104 @@ public void ShouldNotBeEqualIfOneIsStaged() public void ShouldBeEqualIfBothAreStaged() { 
var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath", true); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); var gitStatusEntry2 = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", - GitFileStatus.Added, "SomeOriginalPath", true); + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); gitStatusEntry.Should().Be(gitStatusEntry2); } + + [Test] + public void StagedIsTrue() + { + var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Added, GitFileStatus.None, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Modified, GitFileStatus.None, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Deleted, GitFileStatus.None, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Copied, GitFileStatus.None, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Renamed, GitFileStatus.None, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeTrue(); + } + + [Test] + public void StagedIsFalse() + { + var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.None, GitFileStatus.Added, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.None, GitFileStatus.Modified, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", 
"SomeProjectPath", + GitFileStatus.None, GitFileStatus.Deleted, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.None, GitFileStatus.Copied, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.None, GitFileStatus.Renamed, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Untracked, GitFileStatus.Untracked, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Ignored, GitFileStatus.Ignored, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Unmerged, GitFileStatus.Added, "SomeOriginalPath"); + gitStatusEntry.Staged.Should().BeFalse(); + } + + [Test] + public void UnmergedDetection() + { + var gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Added, GitFileStatus.Added, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Deleted, GitFileStatus.Deleted, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Unmerged, GitFileStatus.Unmerged, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Added, GitFileStatus.Unmerged, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry 
= new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Unmerged, GitFileStatus.Added, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Deleted, GitFileStatus.Unmerged, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + + gitStatusEntry = new GitStatusEntry("SomePath", "SomeFullPath", "SomeProjectPath", + GitFileStatus.Unmerged, GitFileStatus.Deleted, "SomeOriginalPath"); + gitStatusEntry.Unmerged.Should().BeTrue(); + } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/LockOutputProcessorTests.cs b/src/tests/UnitTests/IO/LockOutputProcessorTests.cs index 0fb8c4a44..07797f03b 100644 --- a/src/tests/UnitTests/IO/LockOutputProcessorTests.cs +++ b/src/tests/UnitTests/IO/LockOutputProcessorTests.cs @@ -68,6 +68,25 @@ public void ShouldParseTwoLocksFormat() new GitLock("2f9cfde9c159d50e235cc1402c3e534b0bf2198afb20760697a5f9b07bf04fb3", "somezip.zip".ToNPath(), new GitUser("GitHub User", ""), now) }; + AssertProcessOutput(output, expected); + } + + [Test] + public void ShouldParseVSTSLocksFormat() + { + var nowString = DateTimeOffset.UtcNow.ToString(@"yyyy-MM-dd\THH\:mm\:ss.ff\Z"); + var now = DateTimeOffset.ParseExact(nowString, Constants.Iso8601Formats, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + var output = new[] + { + $@"[{{""id"":""7"" ,""path"":""Assets/Main.unity"",""owner"":{{""name"":""GitHub User""}},""locked_at"":""{nowString}""}}]", + string.Empty, + "1 lock(s) matched query.", + null + }; + + var expected = new[] { + new GitLock("7", "Assets/Main.unity".ToNPath(), new GitUser("GitHub User", ""), now), + }; AssertProcessOutput(output, expected); } diff --git a/src/tests/UnitTests/IO/LogEntryOutputProcessorTests.cs b/src/tests/UnitTests/IO/LogEntryOutputProcessorTests.cs index 5c0bc3a5c..f613210c4 100644 --- 
a/src/tests/UnitTests/IO/LogEntryOutputProcessorTests.cs +++ b/src/tests/UnitTests/IO/LogEntryOutputProcessorTests.cs @@ -46,15 +46,103 @@ public void ShouldParseSingleCommit() { new GitLogEntry("1cd4b9154a88bc8c7b09cb8cacc79bf1d5bde8cf", "Author Person", "author@example.com", - "Author Person", "author@example.com", - "Rename RepositoryModelBase to RepositoryModel", + "Author Person", "author@example.com", "Rename RepositoryModelBase to RepositoryModel", + "", commitTime, commitTime, new List { new GitStatusEntry("src/GitHub.App/Models/RemoteRepositoryModel.cs", TestRootPath + @"\src/GitHub.App/Models/RemoteRepositoryModel.cs", null, - GitFileStatus.Modified), + GitFileStatus.Modified, GitFileStatus.None), + }) + }; + + AssertProcessOutput(output, expected); + } + + [Test] + public void ShouldParseSummaryAndDescription() + { + var output = new[] + { + "1cd4b9154a88bc8c7b09cb8cacc79bf1d5bde8cf", + "865b8d9d6e5e3bd6d7a4dc9c9f3588192314942c", + "Author Person", + "author@example.com", + "2017-01-06T15:36:57+01:00", + "Author Person", + "author@example.com", + "2017-01-06T15:36:57+01:00", + "Rename RepositoryModelBase to RepositoryModel", + "", + "This is a line on the description", + "---GHUBODYEND---", + "M src/GitHub.App/Models/RemoteRepositoryModel.cs", + null, + }; + + var commitTime = new DateTimeOffset(2017, 1, 6, 15, 36, 57, TimeSpan.FromHours(1)); + + var expected = new[] + { + new GitLogEntry("1cd4b9154a88bc8c7b09cb8cacc79bf1d5bde8cf", + "Author Person", "author@example.com", + "Author Person", "author@example.com", + "Rename RepositoryModelBase to RepositoryModel", + "This is a line on the description", + commitTime, + commitTime, + new List + { + new GitStatusEntry("src/GitHub.App/Models/RemoteRepositoryModel.cs", + TestRootPath + @"\src/GitHub.App/Models/RemoteRepositoryModel.cs", null, + GitFileStatus.Modified, GitFileStatus.None), + }) + }; + + AssertProcessOutput(output, expected); + } + + [Test] + public void 
ShouldParseSummaryAndDescriptionWithExtraNewLines() + { + var output = new[] + { + "1cd4b9154a88bc8c7b09cb8cacc79bf1d5bde8cf", + "865b8d9d6e5e3bd6d7a4dc9c9f3588192314942c", + "Author Person", + "author@example.com", + "2017-01-06T15:36:57+01:00", + "Author Person", + "author@example.com", + "2017-01-06T15:36:57+01:00", + "Rename RepositoryModelBase to RepositoryModel", + "", + "", + "", + "This is a line on the description", + "---GHUBODYEND---", + "M src/GitHub.App/Models/RemoteRepositoryModel.cs", + null, + }; + + var commitTime = new DateTimeOffset(2017, 1, 6, 15, 36, 57, TimeSpan.FromHours(1)); + + var expected = new[] + { + new GitLogEntry("1cd4b9154a88bc8c7b09cb8cacc79bf1d5bde8cf", + "Author Person", "author@example.com", + "Author Person", "author@example.com", + "Rename RepositoryModelBase to RepositoryModel", + Environment.NewLine + Environment.NewLine + "This is a line on the description", + commitTime, + commitTime, + new List + { + new GitStatusEntry("src/GitHub.App/Models/RemoteRepositoryModel.cs", + TestRootPath + @"\src/GitHub.App/Models/RemoteRepositoryModel.cs", null, + GitFileStatus.Modified, GitFileStatus.None), }) }; @@ -77,4 +165,4 @@ private void AssertProcessOutput(IEnumerable lines, GitLogEntry[] expect results.AssertEqual(expected); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/StatusOutputProcessorTests.cs b/src/tests/UnitTests/IO/StatusOutputProcessorTests.cs index da18ec51c..61fefa82f 100644 --- a/src/tests/UnitTests/IO/StatusOutputProcessorTests.cs +++ b/src/tests/UnitTests/IO/StatusOutputProcessorTests.cs @@ -1,5 +1,6 @@ using TestUtils; using System.Collections.Generic; +using System.Linq; using NUnit.Framework; using GitHub.Unity; @@ -28,12 +29,44 @@ public void ShouldParseDirtyWorkingTreeUntracked() LocalBranch = "master", Entries = new List { - new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.Deleted), - new GitStatusEntry("GitHubVS.sln", TestRootPath + 
@"\GitHubVS.sln", null, GitFileStatus.Modified), - new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, "README.md", true), - new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, staged: true), - new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked), - } + new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.None, GitFileStatus.Modified), + new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, GitFileStatus.None, "README.md"), + new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.None, GitFileStatus.Deleted), + new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, GitFileStatus.None), + new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + }.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() + }); + } + + [Test] + public void ShouldParseUnmergedStates() + { + var output = new[] + { + "## master", + "DD something1.txt", + "AU something2.txt", + "UD something3.txt", + "UA something4.txt", + "DU something5.txt", + "AA something6.txt", + "UU something7.txt", + null + }; + + AssertProcessOutput(output, new GitStatus + { + LocalBranch = "master", + Entries = new List + { + new GitStatusEntry("something1.txt", TestRootPath + @"\something1.txt", null, GitFileStatus.Deleted, GitFileStatus.Deleted), + new GitStatusEntry("something2.txt", TestRootPath + @"\something2.txt", null, GitFileStatus.Added, GitFileStatus.Unmerged), + new GitStatusEntry("something3.txt", TestRootPath + @"\something3.txt", null, GitFileStatus.Unmerged, GitFileStatus.Deleted), + new GitStatusEntry("something4.txt", TestRootPath + @"\something4.txt", null, GitFileStatus.Unmerged, 
GitFileStatus.Added), + new GitStatusEntry("something5.txt", TestRootPath + @"\something5.txt", null, GitFileStatus.Deleted, GitFileStatus.Unmerged), + new GitStatusEntry("something6.txt", TestRootPath + @"\something6.txt", null, GitFileStatus.Added, GitFileStatus.Added), + new GitStatusEntry("something7.txt", TestRootPath + @"\something7.txt", null, GitFileStatus.Unmerged, GitFileStatus.Unmerged), + }.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() }); } @@ -59,12 +92,12 @@ public void ShouldParseDirtyWorkingTreeTrackedAhead1Behind1() Behind = 1, Entries = new List { - new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.Deleted), - new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.Modified), - new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, "README.md", true), - new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, staged: true), - new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked), - } + new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.None, GitFileStatus.Modified), + new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, GitFileStatus.None, "README.md"), + new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.None, GitFileStatus.Deleted), + new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, GitFileStatus.None), + new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + }.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() }); } @@ -89,12 +122,12 @@ public void ShouldParseDirtyWorkingTreeTrackedAhead1() Ahead = 1, 
Entries = new List { - new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.Deleted), - new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.Modified), - new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, "README.md", true), - new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, staged: true), - new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked), - } + new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.None, GitFileStatus.Modified), + new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, GitFileStatus.None, "README.md"), + new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.None, GitFileStatus.Deleted), + new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, GitFileStatus.None), + new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + }.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() }); } @@ -119,12 +152,12 @@ public void ShouldParseDirtyWorkingTreeTrackedBehind1() Behind = 1, Entries = new List { - new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.Deleted), - new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.Modified), - new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, "README.md", true), - new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, staged: true), - new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked), - } + new GitStatusEntry("GitHubVS.sln", 
TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.None, GitFileStatus.Modified), + new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, GitFileStatus.None, "README.md"), + new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.None, GitFileStatus.Deleted), + new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, GitFileStatus.None), + new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + }.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() }); } @@ -148,12 +181,12 @@ public void ShouldParseDirtyWorkingTreeTracked() RemoteBranch = "origin/master", Entries = new List { - new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.Deleted), - new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.Modified), - new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, "README.md", true), - new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, staged: true), - new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked), - } + new GitStatusEntry("GitHubVS.sln", TestRootPath + @"\GitHubVS.sln", null, GitFileStatus.None, GitFileStatus.Modified), + new GitStatusEntry("README2.md", TestRootPath + @"\README2.md", null, GitFileStatus.Renamed, GitFileStatus.None, "README.md"), + new GitStatusEntry("deploy.cmd", TestRootPath + @"\deploy.cmd", null, GitFileStatus.None, GitFileStatus.Deleted), + new GitStatusEntry("something added.txt", TestRootPath + @"\something added.txt", null, GitFileStatus.Added, GitFileStatus.None), + new GitStatusEntry("something.txt", TestRootPath + @"\something.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + 
}.OrderBy(entry => entry.Path, GitStatusOutputProcessor.StatusOutputPathComparer.Instance).ToList() }); } @@ -245,6 +278,46 @@ public void ShouldParseCleanWorkingTreeTracked() }); } + [Test] + public void ShouldSortOutputCorrectly() + { + var output = new[] + { + "## master", + "?? Assets/Assets.Test.dll.meta", + "?? Assets/Assets.Test.dll", + "?? Plugins/GitHub.Unity.dll", + "?? Plugins/GitHub.Unity.dll.mdb", + "?? Plugins/GitHub.Unity.dll.mdb.meta", + "?? Plugins/GitHub.Unity2.dll", + "?? Plugins/GitHub.Unity2.dll.mdb", + "?? Plugins/GitHub.Unity2.dll.mdb.meta", + "?? Plugins/GitHub.Unity2.dll.meta", + "?? Plugins/GitHub.Unity.dll.meta", + "?? blah.txt", + null + }; + + AssertProcessOutput(output, new GitStatus + { + LocalBranch = "master", + Entries = new List + { + new GitStatusEntry(@"Assets/Assets.Test.dll", TestRootPath + @"\Assets/Assets.Test.dll", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Assets/Assets.Test.dll.meta", TestRootPath + @"\Assets/Assets.Test.dll.meta", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"blah.txt", TestRootPath + @"\blah.txt", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity.dll", TestRootPath + @"\Plugins/GitHub.Unity.dll", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity.dll.meta", TestRootPath + @"\Plugins/GitHub.Unity.dll.meta", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity.dll.mdb", TestRootPath + @"\Plugins/GitHub.Unity.dll.mdb", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity.dll.mdb.meta", TestRootPath + @"\Plugins/GitHub.Unity.dll.mdb.meta", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity2.dll", TestRootPath + @"\Plugins/GitHub.Unity2.dll", null, GitFileStatus.Untracked, 
GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity2.dll.meta", TestRootPath + @"\Plugins/GitHub.Unity2.dll.meta", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity2.dll.mdb", TestRootPath + @"\Plugins/GitHub.Unity2.dll.mdb", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + new GitStatusEntry(@"Plugins/GitHub.Unity2.dll.mdb.meta", TestRootPath + @"\Plugins/GitHub.Unity2.dll.mdb.meta", null, GitFileStatus.Untracked, GitFileStatus.Untracked), + } + }); + } + private void AssertProcessOutput(IEnumerable lines, GitStatus expected) { var gitObjectFactory = SubstituteFactory.CreateGitObjectFactory(TestRootPath); @@ -262,4 +335,4 @@ private void AssertProcessOutput(IEnumerable lines, GitStatus expected) result.Value.AssertEqual(expected); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/IO/WindowsDiskUsageOutputProcessorTests.cs b/src/tests/UnitTests/IO/WindowsDiskUsageOutputProcessorTests.cs index 5e1a327e8..860f7cdfd 100644 --- a/src/tests/UnitTests/IO/WindowsDiskUsageOutputProcessorTests.cs +++ b/src/tests/UnitTests/IO/WindowsDiskUsageOutputProcessorTests.cs @@ -48,7 +48,7 @@ public void WindowsDiskUsageOutput() private void AssertProcessOutput(IEnumerable lines, int expected) { - int? result = null; + long? 
result = null; var outputProcessor = new WindowsDiskUsageOutputProcessor(); outputProcessor.OnEntry += status => { result = status; }; @@ -61,4 +61,4 @@ private void AssertProcessOutput(IEnumerable lines, int expected) Assert.AreEqual(expected, result.Value); } } -} \ No newline at end of file +} diff --git a/src/tests/UnitTests/Primitives/SerializationTests.cs b/src/tests/UnitTests/Primitives/SerializationTests.cs index 32bcdf76b..17faab0f3 100644 --- a/src/tests/UnitTests/Primitives/SerializationTests.cs +++ b/src/tests/UnitTests/Primitives/SerializationTests.cs @@ -11,6 +11,41 @@ namespace UnitTests.Primitives [TestFixture] class SerializationTests { + [TestCase("2018-05-01T12:04:29.1234567-02:00", "2018-05-01T14:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.123456-02:00", "2018-05-01T14:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.12345-02:00", "2018-05-01T14:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.1234-02:00", "2018-05-01T14:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.123-02:00", "2018-05-01T14:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.12-02:00", "2018-05-01T14:04:29.120+00:00")] + [TestCase("2018-05-01T12:04:29.1-02:00", "2018-05-01T14:04:29.100+00:00")] + [TestCase("2018-05-01T12:04:29-02:00", "2018-05-01T14:04:29.000+00:00")] + [TestCase("2018-05-01T12:04:29.1234567Z", "2018-05-01T12:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.123456Z", "2018-05-01T12:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.12345Z", "2018-05-01T12:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.1234Z", "2018-05-01T12:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.123Z", "2018-05-01T12:04:29.123+00:00")] + [TestCase("2018-05-01T12:04:29.12Z", "2018-05-01T12:04:29.120+00:00")] + [TestCase("2018-05-01T12:04:29.1Z", "2018-05-01T12:04:29.100+00:00")] + [TestCase("2018-05-01T12:04:29Z", "2018-05-01T12:04:29.000+00:00")] + public void FromLocalStringToUniversalDateTimeOffset(string input, string expected) + { + var dtInput = 
DateTimeOffset.ParseExact(input, Constants.Iso8601Formats, CultureInfo.InvariantCulture, Constants.DateTimeStyle); + var output = dtInput.ToUniversalTime().ToString(Constants.Iso8601Format); + Assert.AreEqual(expected, output); + + var json = $@"{{""date"":""{input}""}}"; + Assert.DoesNotThrow(() => json.FromJson(lowerCase: true)); + } + + [Test] + public void JsonSerializationUsesKnownFormat() + { + var now = DateTimeOffset.Now; + var output = new ADateTimeOffset { Date = now }; + var json = output.ToJson(lowerCase: true); + Assert.AreEqual($@"{{""date"":""{ now.ToUniversalTime().ToString(Constants.Iso8601Format, CultureInfo.InvariantCulture) }""}}", json); + } + [Test] public void DateTimeSerializationRoundTrip() { @@ -37,6 +72,27 @@ public void DateTimeSerializationRoundTrip() Assert.AreEqual(dt3, dt4); } + [Test] + public void DateTimeSerializationRoundTripFormatPointZ() + { + var dt1 = DateTimeOffset.ParseExact("2018-05-01T15:04:29.00Z", Constants.Iso8601Formats, CultureInfo.InvariantCulture, Constants.DateTimeStyle); + var str1 = dt1.ToJson(); + var ret1 = str1.FromJson(); + Assert.AreEqual(dt1, ret1); + + var dt2 = DateTimeOffset.ParseExact("2018-05-01T15:04:29Z", Constants.Iso8601Formats, CultureInfo.InvariantCulture, Constants.DateTimeStyle); + var str2 = dt2.ToJson(); + var ret2 = str2.FromJson(); + Assert.AreEqual(dt2, ret2); + + Assert.AreEqual(dt1, dt2); + } + + class ADateTimeOffset + { + public DateTimeOffset Date; + } + class TestData { public List Things { get; set; } = new List(); diff --git a/src/tests/UnitTests/ProcessManagerExtensions.cs b/src/tests/UnitTests/ProcessManagerExtensions.cs deleted file mode 100644 index c4043551f..000000000 --- a/src/tests/UnitTests/ProcessManagerExtensions.cs +++ /dev/null @@ -1,109 +0,0 @@ -using GitHub.Unity; -using System.Collections.Generic; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - -namespace UnitTests -{ - static class ProcessManagerExtensions - { - static NPath 
defaultGitPath = "git".ToNPath(); - - public static async Task> GetGitBranches(this ProcessManager processManager, - NPath workingDirectory, - NPath? gitPath = null) - { - var processor = new BranchListOutputProcessor(); - NPath path = gitPath ?? defaultGitPath; - - var results = await new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, "branch -vv", workingDirectory, false) - .Start() - .Task; - - return results; - } - - public static async Task> GetGitLogEntries(this ProcessManager processManager, - NPath workingDirectory, - IEnvironment environment, IFileSystem filesystem, IProcessEnvironment gitEnvironment, - int? logCount = null, - NPath? gitPath = null) - { - var gitStatusEntryFactory = new GitObjectFactory(environment); - - var processor = new LogEntryOutputProcessor(gitStatusEntryFactory); - - var logNameStatus = @"log --pretty=format:""%H%n%P%n%aN%n%aE%n%aI%n%cN%n%cE%n%cI%n%B---GHUBODYEND---"" --name-status"; - - if (logCount.HasValue) - { - logNameStatus = logNameStatus + " -" + logCount.Value; - } - - NPath path = gitPath ?? defaultGitPath; - - var results = await new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, logNameStatus, workingDirectory, false) - .Start() - .Task; - - return results; - } - - public static async Task GetGitStatus(this ProcessManager processManager, - NPath workingDirectory, - IEnvironment environment, IFileSystem filesystem, IProcessEnvironment gitEnvironment, - NPath? gitPath = null) - { - var gitStatusEntryFactory = new GitObjectFactory(environment); - var processor = new GitStatusOutputProcessor(gitStatusEntryFactory); - - NPath path = gitPath ?? 
defaultGitPath; - - var results = await new ProcessTask(CancellationToken.None, processor) - .Configure(processManager, path, "status -b -u --porcelain", workingDirectory, false) - .Start() - .Task; - - return results; - } - - public static async Task> GetGitRemoteEntries(this ProcessManager processManager, - NPath workingDirectory, - NPath? gitPath = null) - { - var processor = new RemoteListOutputProcessor(); - - NPath path = gitPath ?? defaultGitPath; - - var results = await new ProcessTaskWithListOutput(CancellationToken.None, processor) - .Configure(processManager, path, "remote -v", workingDirectory, false) - .Start() - .Task; - return results; - } - - public static async Task GetGitCreds(this ProcessManager processManager, - NPath workingDirectory, - IEnvironment environment, IFileSystem filesystem, IProcessEnvironment gitEnvironment, - NPath? gitPath = null) - { - var processor = new FirstNonNullLineOutputProcessor(); - - NPath path = gitPath ?? defaultGitPath; - - var task = new ProcessTask(CancellationToken.None, processor) - .Configure(processManager, path, "credential-wincred get", workingDirectory, true); - - task.OnStartProcess += p => - { - p.StandardInput.WriteLine("protocol=https"); - p.StandardInput.WriteLine("host=github.com"); - p.StandardInput.Close(); - }; - return await task.StartAsAsync(); - } - } -} diff --git a/src/tests/UnitTests/UI/TreeBaseTests.cs b/src/tests/UnitTests/UI/TreeBaseTests.cs index 94485a63c..60110e39c 100644 --- a/src/tests/UnitTests/UI/TreeBaseTests.cs +++ b/src/tests/UnitTests/UI/TreeBaseTests.cs @@ -35,6 +35,7 @@ public struct TestTreeData : ITreeData public string Path { get; set; } public bool IsActive { get; set; } + public bool IsChecked { get; set; } public override string ToString() { @@ -113,14 +114,13 @@ protected override void AddCheckedNode(TestTreeNode node) TestTreeListener.AddCheckedNode(node); } - protected override TestTreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool 
isActive, bool isHidden, bool isCollapsed, bool isChecked, TestTreeData? treeData, bool isContainer) + protected override TestTreeNode CreateTreeNode(string path, string label, int level, bool isFolder, bool isActive, bool isHidden, bool isCollapsed, bool isChecked, TestTreeData? treeData) { if (traceLogging) { Logger.Trace( - "CreateTreeNode(path: {0}, label: {1}, level: {2}, isFolder: {3}, " + - "isActive: {4}, isHidden: {5}, isCollapsed: {6}, isChecked: {7}, treeData: {8})", path, label, - level, isFolder, isActive, isHidden, isCollapsed, isChecked, treeData?.ToString() ?? "[NULL]"); + "CreateTreeNode(path: {0}, label: {1}, level: {2}, isFolder: {3}, isActive: {4}, isHidden: {5}, isCollapsed: {6}, isChecked: {7}, treeData: {8})", + path, label, level, isFolder, isActive, isHidden, isCollapsed, isChecked, treeData?.ToString() ?? "[NULL]"); } TestTreeListener.CreateTreeNode(path, label, level, isFolder, isActive, isHidden, isCollapsed, isChecked, @@ -186,6 +186,11 @@ public override TestTreeNode SelectedNode } } + public new void ToggleNodeChecked(int idx, TestTreeNode node) + { + base.ToggleNodeChecked(idx, node); + } + protected override List Nodes { get @@ -607,6 +612,211 @@ public void ShouldPopulateTreeWithSingleEntryWithMetaInPath() } }); } + + [Test] + public void ShouldCheckParentOfMetaFile() + { + var testTree = new TestTree(true); + var testTreeListener = testTree.TestTreeListener; + + testTreeListener.GetCollapsedFolders().Returns(new string[0]); + testTreeListener.SelectedNode.Returns((TestTreeNode)null); + testTreeListener.GetCheckedFiles().Returns(new string[0]); + testTreeListener.Nodes.Returns(new List()); + testTreeListener.PathSeparator.Returns(@"\"); + testTreeListener.DisplayRootNode.Returns(true); + testTreeListener.IsSelectable.Returns(false); + testTreeListener.Title.Returns("Test Tree"); + testTreeListener.PromoteMetaFiles.Returns(true); + + var testTreeData = new[] { + new TestTreeData { + Path = "Folder\\Default Scene.unity" + }, + new 
TestTreeData { + Path = "Folder\\Default Scene.unity.meta" + } + }; + testTree.Load(testTreeData); + + testTree.CreatedTreeNodes.ShouldAllBeEquivalentTo(new[] { + new TestTreeNode { + Path = "Test Tree", + Label = "Test Tree", + IsFolder = true + }, + new TestTreeNode { + Path = "Folder", + Label = "Folder", + Level = 1, + IsFolder = true + }, + new TestTreeNode { + Path = "Folder\\Default Scene.unity", + Label = "Default Scene.unity", + Level = 2, + TreeData = testTreeData[0], + IsContainer = true + }, + new TestTreeNode { + Path = "Folder\\Default Scene.unity.meta", + Label = "Default Scene.unity.meta", + Level = 3, + TreeData = testTreeData[1] + } + }); + + var sceneNode = testTree.CreatedTreeNodes[2]; + var sceneMetaNode = testTree.CreatedTreeNodes[3]; + + Assert.AreEqual(CheckState.Empty, sceneNode.CheckState); + Assert.AreEqual(CheckState.Empty, sceneMetaNode.CheckState); + + testTree.ToggleNodeChecked(3, sceneMetaNode); + + Assert.AreEqual(CheckState.Checked, sceneNode.CheckState); + Assert.AreEqual(CheckState.Checked, sceneMetaNode.CheckState); + + testTreeListener.Received(2).AddCheckedNode(Arg.Any()); + } + + [Test] + public void ShouldRippleChecksCorrectly() + { + var testTree = new TestTree(true); + var testTreeListener = testTree.TestTreeListener; + + testTreeListener.GetCollapsedFolders().Returns(new string[0]); + testTreeListener.SelectedNode.Returns((TestTreeNode)null); + testTreeListener.GetCheckedFiles().Returns(new string[0]); + testTreeListener.Nodes.Returns(new List()); + testTreeListener.PathSeparator.Returns(@"\"); + testTreeListener.DisplayRootNode.Returns(true); + testTreeListener.IsSelectable.Returns(false); + testTreeListener.Title.Returns("Test Tree"); + testTreeListener.PromoteMetaFiles.Returns(true); + + var testTreeData = new[] { + new TestTreeData { + Path = "Root\\Parent\\A.txt" + }, + new TestTreeData { + Path = "Root\\Parent\\B.txt" + }, + new TestTreeData { + Path = "Root\\Parent\\C.txt" + } + }; + + testTree.Load(testTreeData); 
+ + testTree.CreatedTreeNodes.ShouldAllBeEquivalentTo(new[] { + new TestTreeNode { + Path = "Test Tree", + Label = "Test Tree", + IsFolder = true + }, + new TestTreeNode { + Path = "Root", + Label = "Root", + Level = 1, + IsFolder = true + }, + new TestTreeNode { + Path = "Root\\Parent", + Label = "Parent", + Level = 2, + IsFolder = true + }, + new TestTreeNode { + Path = "Root\\Parent\\A.txt", + Label = "A.txt", + Level = 3, + TreeData = testTreeData[0], + }, + new TestTreeNode { + Path = "Root\\Parent\\B.txt", + Label = "B.txt", + Level = 3, + TreeData = testTreeData[1], + }, + new TestTreeNode { + Path = "Root\\Parent\\C.txt", + Label = "C.txt", + Level = 3, + TreeData = testTreeData[2], + } + }); + + var rootNode = testTree.CreatedTreeNodes[1]; + var parentNode = testTree.CreatedTreeNodes[2]; + var aNode = testTree.CreatedTreeNodes[3]; + var bNode = testTree.CreatedTreeNodes[4]; + var cNode = testTree.CreatedTreeNodes[5]; + + // Initial state, everything unchecked + + Assert.AreEqual(CheckState.Empty, rootNode.CheckState); + Assert.AreEqual(CheckState.Empty, parentNode.CheckState); + Assert.AreEqual(CheckState.Empty, aNode.CheckState); + Assert.AreEqual(CheckState.Empty, bNode.CheckState); + Assert.AreEqual(CheckState.Empty, cNode.CheckState); + + testTree.ToggleNodeChecked(1, rootNode); + + // Checked the root node, everything checked + + Assert.AreEqual(CheckState.Checked, rootNode.CheckState); + Assert.AreEqual(CheckState.Checked, parentNode.CheckState); + Assert.AreEqual(CheckState.Checked, aNode.CheckState); + Assert.AreEqual(CheckState.Checked, bNode.CheckState); + Assert.AreEqual(CheckState.Checked, cNode.CheckState); + + testTreeListener.Received(3).AddCheckedNode(Arg.Any()); + testTreeListener.ClearReceivedCalls(); + + // Unchecked c.txt, c.txt unchecked, parents mixed + + testTree.ToggleNodeChecked(5, cNode); + + Assert.AreEqual(CheckState.Mixed, rootNode.CheckState); + Assert.AreEqual(CheckState.Mixed, parentNode.CheckState); + 
Assert.AreEqual(CheckState.Checked, aNode.CheckState); + Assert.AreEqual(CheckState.Checked, bNode.CheckState); + Assert.AreEqual(CheckState.Empty, cNode.CheckState); + + testTreeListener.Received(1).RemoveCheckedNode(Arg.Any()); + testTreeListener.ClearReceivedCalls(); + + testTree.ToggleNodeChecked(5, cNode); + + // Checked c.txt, everything checked + + Assert.AreEqual(CheckState.Checked, rootNode.CheckState); + Assert.AreEqual(CheckState.Checked, parentNode.CheckState); + Assert.AreEqual(CheckState.Checked, aNode.CheckState); + Assert.AreEqual(CheckState.Checked, bNode.CheckState); + Assert.AreEqual(CheckState.Checked, cNode.CheckState); + + testTreeListener.Received(1).AddCheckedNode(Arg.Any()); + testTreeListener.ClearReceivedCalls(); + + // Unchecked a.txt b.txt and c.txt, everything checked + + testTree.ToggleNodeChecked(3, aNode); + testTree.ToggleNodeChecked(4, bNode); + testTree.ToggleNodeChecked(5, cNode); + + Assert.AreEqual(CheckState.Empty, rootNode.CheckState); + Assert.AreEqual(CheckState.Empty, parentNode.CheckState); + Assert.AreEqual(CheckState.Empty, aNode.CheckState); + Assert.AreEqual(CheckState.Empty, bNode.CheckState); + Assert.AreEqual(CheckState.Empty, cNode.CheckState); + + testTreeListener.Received(3).RemoveCheckedNode(Arg.Any()); + testTreeListener.ClearReceivedCalls(); + } + [Test] public void ShouldPopulateTreeWithSingleEntryWithNonPromotedMetaInPath() { diff --git a/src/tests/UnitTests/UnitTests.csproj b/src/tests/UnitTests/UnitTests.csproj index bff3c8d57..20f755fe7 100644 --- a/src/tests/UnitTests/UnitTests.csproj +++ b/src/tests/UnitTests/UnitTests.csproj @@ -76,6 +76,7 @@ + @@ -97,7 +98,6 @@ - diff --git a/src/tests/UnitTests/UnitTests.v3.ncrunchproject b/src/tests/UnitTests/UnitTests.v3.ncrunchproject index d63e89685..d30a60555 100644 --- a/src/tests/UnitTests/UnitTests.v3.ncrunchproject +++ b/src/tests/UnitTests/UnitTests.v3.ncrunchproject @@ -1,9 +1,9 @@  - ..\..\..\lib\sfw.net\win\x64\Debug\**.* - 
..\..\..\script\lib\UnityEditor.dll - ..\..\..\script\lib\UnityEngine.dll + ..\..\..\script\lib\Managed\UnityEditor.dll + ..\..\..\script\lib\Managed\UnityEngine.dll + ..\..\..\lib\sfw\win\x64\**.* PostBuildEventDisabled diff --git a/submodules/packaging b/submodules/packaging new file mode 160000 index 000000000..77c7f8765 --- /dev/null +++ b/submodules/packaging @@ -0,0 +1 @@ +Subproject commit 77c7f876594e73f09e594d5b0c426fdb1470d3bb diff --git a/unity/PackageProject/.gitignore b/unity/PackageProject/.gitignore index 01498ea52..aaecd01aa 100644 --- a/unity/PackageProject/.gitignore +++ b/unity/PackageProject/.gitignore @@ -9,10 +9,13 @@ *.dylib *.so *.bundle +*.cs ProjectVersion.txt Library/ // These files come from lib/ Assets/Plugins/GitHub/Editor/x64/ -Assets/Plugins/GitHub/Editor/x86/ \ No newline at end of file +Assets/Plugins/GitHub/Editor/x86/ +Assets/Plugins/GitHub/Editor/libsfw.bundle.meta +Assets/Plugins/GitHub/Editor/libsfw.so.meta \ No newline at end of file diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/AsyncBridge.Net35.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/AsyncBridge.Net35.dll.meta index 68688858e..1c1d85763 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/AsyncBridge.Net35.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/AsyncBridge.Net35.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: d516f2a1bec6a9645a084ef8c9237132 timeCreated: 1491391262 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: 
WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/ExtensionLoader.cs.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/ExtensionLoader.cs.meta new file mode 100644 index 000000000..2060c1819 --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/ExtensionLoader.cs.meta @@ -0,0 +1,34 @@ +fileFormatVersion: 2 +guid: dae2ecee8a704dd59797e26554ff8606 +timeCreated: 1534504082 +licenseType: Free +PluginImporter: + serializedVersion: 2 + iconMap: {} + executionOrder: {} + isPreloaded: 0 + isOverridable: 0 + platformData: + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 1 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.mdb.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.mdb.meta new file mode 100644 index 000000000..d484c54df --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.mdb.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 181f65fb096cedd4493bc9971257d8b1 +timeCreated: 1534516893 +licenseType: Free +DefaultImporter: + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.meta new file mode 100644 index 000000000..3b1911219 --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.45.dll.meta @@ -0,0 +1,34 @@ +fileFormatVersion: 2 +guid: c743ae24ee231884887054d20ccdd0ab +timeCreated: 1534504082 +licenseType: Free +PluginImporter: + serializedVersion: 2 + iconMap: {} + executionOrder: {} + 
isPreloaded: 0 + isOverridable: 0 + platformData: + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.mdb.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.mdb.meta index ce6d7087f..9b74d73ee 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.mdb.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.mdb.meta @@ -1,7 +1,7 @@ fileFormatVersion: 2 guid: c5c83d14802e712408f23409f3c59e26 timeCreated: 1493304320 -licenseType: Pro +licenseType: Free DefaultImporter: userData: assetBundleName: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.meta index aae704762..d12a12326 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Api.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: c743ae24ee231884887054d20ccdd0ae timeCreated: 1491391261 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: 
assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.mdb.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.mdb.meta index 8ce5af844..d51a20eaf 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.mdb.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.mdb.meta @@ -1,7 +1,7 @@ fileFormatVersion: 2 guid: 23c8bee69b591054094d32918f98facd timeCreated: 1493304320 -licenseType: Pro +licenseType: Free DefaultImporter: userData: assetBundleName: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.meta index 3d04e1950..46ab48b16 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Logging.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: 15ca2bebf173f2d4484686a03a45b56d timeCreated: 1491391259 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 1 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.mdb.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.mdb.meta new file mode 100644 index 000000000..bc30ebed8 --- /dev/null +++ 
b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.mdb.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 877ef3585573ce44eab899298be23158 +timeCreated: 1534516893 +licenseType: Free +DefaultImporter: + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.meta new file mode 100644 index 000000000..1fcd625de --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.45.dll.meta @@ -0,0 +1,34 @@ +fileFormatVersion: 2 +guid: 68c7e4565cde54155bb78d8e935f1ddb +timeCreated: 1534504082 +licenseType: Free +PluginImporter: + serializedVersion: 2 + iconMap: {} + executionOrder: {} + isPreloaded: 0 + isOverridable: 0 + platformData: + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.dll.meta index 2187c3a65..a70aca527 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.Unity.dll.meta @@ -19,7 +19,7 @@ PluginImporter: first: Editor: Editor second: - enabled: 1 + enabled: 0 settings: DefaultValueInitialized: true data: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.mdb.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.mdb.meta new file mode 100644 index 000000000..d34a107fc --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 
+guid: 1186e0491049e40ba8e7e19e418d9e8e +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.meta new file mode 100644 index 000000000..1aeac2004 --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/GitHub.UnityShim.dll.meta @@ -0,0 +1,30 @@ +fileFormatVersion: 2 +guid: 863e1b9976c4e46d29bf83928b3a8ab2 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + isPreloaded: 0 + isOverridable: 0 + platformData: + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 1 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/ICSharpCode.SharpZipLib.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/ICSharpCode.SharpZipLib.dll.meta deleted file mode 100644 index 8abfa3131..000000000 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/ICSharpCode.SharpZipLib.dll.meta +++ /dev/null @@ -1,25 +0,0 @@ -fileFormatVersion: 2 -guid: ecfb28d906a32914d956497c8d3b3395 -timeCreated: 1493304328 -licenseType: Pro -PluginImporter: - serializedVersion: 1 - iconMap: {} - executionOrder: {} - isPreloaded: 0 - isOverridable: 0 - platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU - userData: - assetBundleName: - assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/Mono.Posix.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/Mono.Posix.dll.meta index 7f984fd53..7a71232e5 100644 --- 
a/unity/PackageProject/Assets/Plugins/GitHub/Editor/Mono.Posix.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/Mono.Posix.dll.meta @@ -19,7 +19,7 @@ PluginImporter: first: Editor: Editor second: - enabled: 1 + enabled: 0 settings: DefaultValueInitialized: true data: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/ReadOnlyCollectionsInterfaces.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/ReadOnlyCollectionsInterfaces.dll.meta index ad958912e..98b231bec 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/ReadOnlyCollectionsInterfaces.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/ReadOnlyCollectionsInterfaces.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: 48c22d5d7479fcb49ab3be0cdd2ccec0 timeCreated: 1491391260 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/System.Threading.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/System.Threading.dll.meta index 9f8232768..ea6a32d4c 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/System.Threading.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/System.Threading.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: 790749ba7e4b18141953e39cb13f1b79 timeCreated: 1491392717 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + 
serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/UnityAPIWrapper.cs.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/UnityAPIWrapper.cs.meta new file mode 100644 index 000000000..c87218b83 --- /dev/null +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/UnityAPIWrapper.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 555cd6f54c03341b1970d950df1a5ee5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.bundle.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.bundle.meta deleted file mode 100644 index 907426232..000000000 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.bundle.meta +++ /dev/null @@ -1,106 +0,0 @@ -fileFormatVersion: 2 -guid: 636d33ae594884e7d80b569f429d245d -timeCreated: 1503667182 -licenseType: Free -PluginImporter: - serializedVersion: 2 - iconMap: {} - executionOrder: {} - isPreloaded: 0 - isOverridable: 0 - platformData: - data: - first: - '': Any - second: - enabled: 0 - settings: - Exclude Editor: 0 - Exclude Linux: 1 - Exclude Linux64: 1 - Exclude LinuxUniversal: 1 - Exclude OSXIntel: 1 - Exclude OSXIntel64: 1 - Exclude OSXUniversal: 1 - Exclude Win: 1 - Exclude Win64: 1 - data: - first: - '': Editor - second: - enabled: 0 - 
settings: - CPU: AnyCPU - OS: OSX - data: - first: - Any: - second: - enabled: 0 - settings: {} - data: - first: - Editor: Editor - second: - enabled: 1 - settings: - DefaultValueInitialized: true - data: - first: - Facebook: Win - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Facebook: Win64 - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Linux - second: - enabled: 0 - settings: - CPU: x86 - data: - first: - Standalone: Linux64 - second: - enabled: 0 - settings: - CPU: x86_64 - data: - first: - Standalone: OSXIntel - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: OSXIntel64 - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Win - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Win64 - second: - enabled: 0 - settings: - CPU: AnyCPU - userData: - assetBundleName: - assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.so.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.so.meta deleted file mode 100644 index f5ba9573e..000000000 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/libsfw.so.meta +++ /dev/null @@ -1,106 +0,0 @@ -fileFormatVersion: 2 -guid: 21206c65839f84d0e9ae14bc1fdc68db -timeCreated: 1503931807 -licenseType: Pro -PluginImporter: - serializedVersion: 2 - iconMap: {} - executionOrder: {} - isPreloaded: 0 - isOverridable: 0 - platformData: - data: - first: - '': Any - second: - enabled: 0 - settings: - Exclude Editor: 0 - Exclude Linux: 1 - Exclude Linux64: 1 - Exclude LinuxUniversal: 1 - Exclude OSXIntel: 1 - Exclude OSXIntel64: 1 - Exclude OSXUniversal: 1 - Exclude Win: 1 - Exclude Win64: 1 - data: - first: - '': Editor - second: - enabled: 0 - settings: - CPU: AnyCPU - OS: Linux - data: - first: - Any: - second: - enabled: 0 - settings: {} - data: - first: - Editor: Editor - second: - enabled: 1 - settings: - DefaultValueInitialized: true - data: - first: - 
Facebook: Win - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Facebook: Win64 - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Linux - second: - enabled: 0 - settings: - CPU: x86 - data: - first: - Standalone: Linux64 - second: - enabled: 0 - settings: - CPU: x86_64 - data: - first: - Standalone: OSXIntel - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: OSXIntel64 - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Win - second: - enabled: 0 - settings: - CPU: AnyCPU - data: - first: - Standalone: Win64 - second: - enabled: 0 - settings: - CPU: AnyCPU - userData: - assetBundleName: - assetBundleVariant: diff --git a/unity/PackageProject/Assets/Plugins/GitHub/Editor/sfw.net.dll.meta b/unity/PackageProject/Assets/Plugins/GitHub/Editor/sfw.net.dll.meta index a749fc943..11b151c05 100644 --- a/unity/PackageProject/Assets/Plugins/GitHub/Editor/sfw.net.dll.meta +++ b/unity/PackageProject/Assets/Plugins/GitHub/Editor/sfw.net.dll.meta @@ -1,25 +1,34 @@ fileFormatVersion: 2 guid: f9fc9b08ecd899944adf9860b4abd6b6 timeCreated: 1491392718 -licenseType: Pro +licenseType: Free PluginImporter: - serializedVersion: 1 + serializedVersion: 2 iconMap: {} executionOrder: {} isPreloaded: 0 isOverridable: 0 platformData: - Any: - enabled: 0 - settings: {} - Editor: - enabled: 1 - settings: - DefaultValueInitialized: true - WindowsStoreApps: - enabled: 0 - settings: - CPU: AnyCPU + data: + first: + Any: + second: + enabled: 0 + settings: {} + data: + first: + Editor: Editor + second: + enabled: 1 + settings: + DefaultValueInitialized: true + data: + first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU userData: assetBundleName: assetBundleVariant: diff --git a/unity/PackageProject/preview.png b/unity/PackageProject/preview.png new file mode 100755 index 000000000..926bb3196 --- /dev/null +++ b/unity/PackageProject/preview.png @@ -0,0 +1,3 
@@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82a8bda13a627b1bdb25d6cfd2abeedbc190b8ab5ba8a67a785e929f6967fccb +size 9704