diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..6cf20e3 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,259 @@ +[*] +charset=utf-8 +# We don't specify so people can use their native +# end_of_line = crlf +trim_trailing_whitespace=true +insert_final_newline=true +indent_style=space +indent_size=4 +tab_width=4 +max_line_length=125 + +# Microsoft .NET properties +csharp_new_line_before_members_in_object_initializers=false +csharp_preferred_modifier_order=public, private, protected, internal, new, abstract, virtual, sealed, override, static, readonly, extern, unsafe, volatile, async:suggestion +csharp_prefer_braces=true:suggestion +csharp_space_after_cast=false +dotnet_diagnostic.bc42024.severity=suggestion +dotnet_diagnostic.cs0078.severity=none +dotnet_diagnostic.cs0108.severity=none +dotnet_diagnostic.cs0109.severity=none +dotnet_diagnostic.cs0114.severity=none +dotnet_diagnostic.cs0162.severity=none +dotnet_diagnostic.cs0168.severity=suggestion +dotnet_diagnostic.cs0183.severity=none +dotnet_diagnostic.cs0184.severity=none +dotnet_diagnostic.cs0197.severity=none +dotnet_diagnostic.cs0252.severity=none +dotnet_diagnostic.cs0253.severity=none +dotnet_diagnostic.cs0420.severity=none +dotnet_diagnostic.cs0465.severity=none +dotnet_diagnostic.cs0469.severity=none +dotnet_diagnostic.cs0612.severity=none +dotnet_diagnostic.cs0618.severity=none +dotnet_diagnostic.cs0628.severity=none +dotnet_diagnostic.cs0642.severity=none +dotnet_diagnostic.cs0657.severity=none +dotnet_diagnostic.cs0658.severity=none +dotnet_diagnostic.cs0659.severity=none +dotnet_diagnostic.cs0660.severity=none +dotnet_diagnostic.cs0661.severity=none +dotnet_diagnostic.cs0665.severity=none +dotnet_diagnostic.cs0672.severity=none +dotnet_diagnostic.cs0693.severity=none +dotnet_diagnostic.cs1030.severity=none +dotnet_diagnostic.cs1058.severity=none +dotnet_diagnostic.cs1066.severity=none +dotnet_diagnostic.cs1522.severity=none +dotnet_diagnostic.cs1570.severity=none 
+dotnet_diagnostic.cs1571.severity=none +dotnet_diagnostic.cs1572.severity=none +dotnet_diagnostic.cs1573.severity=none +dotnet_diagnostic.cs1574.severity=none +dotnet_diagnostic.cs1580.severity=none +dotnet_diagnostic.cs1581.severity=none +dotnet_diagnostic.cs1584.severity=none +dotnet_diagnostic.cs1587.severity=none +dotnet_diagnostic.cs1589.severity=none +dotnet_diagnostic.cs1590.severity=none +dotnet_diagnostic.cs1591.severity=none +dotnet_diagnostic.cs1592.severity=none +dotnet_diagnostic.cs1710.severity=none +dotnet_diagnostic.cs1711.severity=none +dotnet_diagnostic.cs1712.severity=none +dotnet_diagnostic.cs1717.severity=none +dotnet_diagnostic.cs1723.severity=none +dotnet_diagnostic.cs1957.severity=none +dotnet_diagnostic.cs1981.severity=none +dotnet_diagnostic.cs1998.severity=none +dotnet_diagnostic.cs4014.severity=none +dotnet_diagnostic.cs7095.severity=none +dotnet_diagnostic.cs8094.severity=none +dotnet_diagnostic.cs8123.severity=none +dotnet_diagnostic.cs8383.severity=none +dotnet_diagnostic.cs8424.severity=none +dotnet_diagnostic.cs8425.severity=none +dotnet_diagnostic.cs8509.severity=none +dotnet_diagnostic.cs8597.severity=none +dotnet_diagnostic.cs8600.severity=none +dotnet_diagnostic.cs8601.severity=none +dotnet_diagnostic.cs8602.severity=none +dotnet_diagnostic.cs8603.severity=none +dotnet_diagnostic.cs8604.severity=none +dotnet_diagnostic.cs8605.severity=none +dotnet_diagnostic.cs8607.severity=none +dotnet_diagnostic.cs8608.severity=none +dotnet_diagnostic.cs8609.severity=none +dotnet_diagnostic.cs8610.severity=none +dotnet_diagnostic.cs8611.severity=none +dotnet_diagnostic.cs8612.severity=none +dotnet_diagnostic.cs8613.severity=none +dotnet_diagnostic.cs8614.severity=none +dotnet_diagnostic.cs8615.severity=none +dotnet_diagnostic.cs8616.severity=none +dotnet_diagnostic.cs8617.severity=none +dotnet_diagnostic.cs8618.severity=none +dotnet_diagnostic.cs8619.severity=none +dotnet_diagnostic.cs8620.severity=none +dotnet_diagnostic.cs8621.severity=none 
+dotnet_diagnostic.cs8622.severity=none +dotnet_diagnostic.cs8624.severity=none +dotnet_diagnostic.cs8625.severity=none +dotnet_diagnostic.cs8629.severity=none +dotnet_diagnostic.cs8631.severity=none +dotnet_diagnostic.cs8632.severity=none +dotnet_diagnostic.cs8633.severity=none +dotnet_diagnostic.cs8634.severity=none +dotnet_diagnostic.cs8643.severity=none +dotnet_diagnostic.cs8644.severity=none +dotnet_diagnostic.cs8645.severity=none +dotnet_diagnostic.cs8655.severity=none +dotnet_diagnostic.cs8656.severity=none +dotnet_diagnostic.cs8667.severity=none +dotnet_diagnostic.cs8670.severity=none +dotnet_diagnostic.cs8714.severity=none +dotnet_diagnostic.cs8762.severity=none +dotnet_diagnostic.cs8763.severity=none +dotnet_diagnostic.cs8764.severity=none +dotnet_diagnostic.cs8765.severity=none +dotnet_diagnostic.cs8766.severity=none +dotnet_diagnostic.cs8767.severity=none +dotnet_diagnostic.cs8768.severity=none +dotnet_diagnostic.cs8769.severity=none +dotnet_diagnostic.cs8770.severity=none +dotnet_diagnostic.cs8774.severity=none +dotnet_diagnostic.cs8775.severity=none +dotnet_diagnostic.cs8776.severity=none +dotnet_diagnostic.cs8777.severity=none +dotnet_style_parentheses_in_arithmetic_binary_operators=never_if_unnecessary:none +dotnet_style_parentheses_in_other_binary_operators=never_if_unnecessary:none +dotnet_style_parentheses_in_relational_binary_operators=never_if_unnecessary:none +dotnet_style_predefined_type_for_locals_parameters_members=true:suggestion +dotnet_style_predefined_type_for_member_access=true:warning +dotnet_style_qualification_for_event=false:suggestion +dotnet_style_qualification_for_field=false:suggestion +dotnet_style_qualification_for_method=false:suggestion +dotnet_style_qualification_for_property=false:suggestion +dotnet_style_require_accessibility_modifiers=for_non_interface_members:suggestion + +# ReSharper properties +resharper_apply_auto_detected_rules=false +resharper_braces_redundant=true +resharper_case_block_braces=next_line_shifted_2 
+resharper_constructor_or_destructor_body=expression_body +resharper_csharp_int_align_comments=true +resharper_csharp_stick_comment=false +resharper_csharp_wrap_after_declaration_lpar=true +resharper_csharp_wrap_after_invocation_lpar=true +resharper_csharp_wrap_arguments_style=chop_if_long +resharper_csharp_wrap_before_declaration_rpar=true +resharper_csharp_wrap_before_invocation_rpar=true +# resharper_int_align_assignments=true +# resharper_int_align_variables=true +resharper_int_align_switch_expressions=true +resharper_int_align_switch_sections=true +resharper_max_enum_members_on_line=1 +resharper_wrap_before_extends_colon=true +# Nested statements +resharper_indent_nested_fixed_stmt=true +resharper_indent_nested_foreach_stmt=true +resharper_indent_nested_for_stmt=true +resharper_indent_nested_lock_stmt=true +resharper_indent_nested_usings_stmt=true +resharper_indent_nested_while_stmt=true + +resharper_local_function_body=expression_body +resharper_max_array_initializer_elements_on_line=10 +resharper_method_or_operator_body=expression_body +resharper_nested_ternary_style=compact +resharper_space_after_cast=false +resharper_space_within_single_line_array_initializer_braces=true +resharper_use_indent_from_vs=false +resharper_wrap_chained_method_calls=chop_if_long +resharper_wrap_lines=true + +# ReSharper inspection severities https://www.jetbrains.com/help/resharper/EditorConfig_Index.html +resharper_arrange_constructor_or_destructor_body_highlighting=suggestion +resharper_arrange_local_function_body_highlighting=suggestion +resharper_arrange_method_or_operator_body_highlighting=suggestion +resharper_arrange_redundant_parentheses_highlighting=suggestion +resharper_arrange_this_qualifier_highlighting=hint +resharper_arrange_type_member_modifiers_highlighting=hint +resharper_arrange_type_modifiers_highlighting=hint +resharper_bad_control_braces_indent_highlighting=warning +resharper_bad_declaration_braces_indent_highlighting=warning 
+resharper_bad_expression_braces_indent_highlighting=warning +resharper_bad_namespace_braces_indent_highlighting=warning +resharper_bad_preprocessor_indent_highlighting=warning +resharper_built_in_type_reference_style_highlighting=hint +resharper_check_namespace_highlighting=hint +resharper_compare_of_floats_by_equality_operator_highlighting=suggestion +resharper_convert_conditional_ternary_expression_to_switch_expression_highlighting=warning +resharper_convert_if_statement_to_switch_statement_highlighting=none +resharper_convert_switch_statement_to_switch_expression_highlighting=warning +resharper_foreach_can_be_partly_converted_to_query_using_another_get_enumerator_highlighting=none +resharper_for_can_be_converted_to_foreach_highlighting=none +resharper_inconsistent_naming_highlighting=none +resharper_invert_if_highlighting=none +resharper_member_can_be_private_global_highlighting=none +resharper_member_can_be_private_local_highlighting=none +resharper_member_can_be_protected_global_highlighting=none +resharper_member_can_be_protected_local_highlighting=none +resharper_merge_conditional_expression_highlighting=warning +resharper_merge_conditional_expression_when_possible_highlighting=warning +resharper_merge_sequential_checks_highlighting=warning +resharper_merge_sequential_checks_when_possible_highlighting=warning +resharper_missing_indent_highlighting=warning +resharper_outdent_is_off_prev_level_highlighting=warning +resharper_possible_null_reference_exception_highlighting=error +resharper_redundant_base_qualifier_highlighting=warning +resharper_redundant_empty_object_creation_argument_list_highlighting=warning +resharper_redundant_type_specification_in_default_expression_highlighting=warning +resharper_remove_redundant_braces_highlighting=warning +resharper_remove_redundant_or_statement_false_highlighting=warning +resharper_remove_redundant_or_statement_true_highlighting=warning +resharper_specify_a_culture_in_string_conversion_explicitly_highlighting=hint 
+resharper_string_compare_is_culture_specific_1_highlighting=hint +resharper_string_compare_is_culture_specific_2_highlighting=hint +resharper_string_compare_is_culture_specific_3_highlighting=hint +resharper_string_compare_is_culture_specific_4_highlighting=hint +resharper_string_compare_is_culture_specific_5_highlighting=hint +resharper_string_compare_is_culture_specific_6_highlighting=hint +resharper_string_compare_to_is_culture_specific_highlighting=hint +resharper_string_index_of_is_culture_specific_1_highlighting=hint +resharper_string_index_of_is_culture_specific_2_highlighting=hint +resharper_string_index_of_is_culture_specific_3_highlighting=hint +resharper_string_last_index_of_is_culture_specific_1_highlighting=hint +resharper_string_last_index_of_is_culture_specific_2_highlighting=hint +resharper_string_last_index_of_is_culture_specific_3_highlighting=hint +resharper_suggest_discard_declaration_var_style_highlighting=none +resharper_suggest_var_or_type_built_in_types_highlighting=none +resharper_suggest_var_or_type_elsewhere_highlighting=none +resharper_suggest_var_or_type_simple_types_highlighting=none +resharper_tabs_and_spaces_mismatch_highlighting=warning +resharper_unused_auto_property_accessor_global_highlighting=none +resharper_unused_auto_property_accessor_local_highlighting=none +resharper_unused_variable_highlighting=suggestion +resharper_virtual_member_call_in_constructor_highlighting=hint +resharper_web_config_module_not_resolved_highlighting=warning +resharper_web_config_type_not_resolved_highlighting=warning +resharper_web_config_wrong_module_highlighting=warning +resharper_wrong_indent_size_highlighting=warning + +# ReSharper inspection severities +resharper_enforce_do_while_statement_braces_highlighting=hint +resharper_enforce_fixed_statement_braces_highlighting=hint +resharper_enforce_foreach_statement_braces_highlighting=hint +resharper_enforce_for_statement_braces_highlighting=hint 
+resharper_enforce_if_statement_braces_highlighting=hint +resharper_enforce_lock_statement_braces_highlighting=hint +resharper_enforce_using_statement_braces_highlighting=hint +resharper_enforce_while_statement_braces_highlighting=hint + +[*.{sln,csproj,bat}] +end_of_line=crlf + +[*.{yml,json,md,sh,css,html}] +end_of_line=lf +indent_size=2 +tab_width=2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5e57f18 --- /dev/null +++ b/.gitignore @@ -0,0 +1,484 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from `dotnet new gitignore` + +# dotenv files +.env + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET +project.lock.json +project.fragment.lock.json +artifacts/ + +# Tye +.tye/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache 
files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml +.idea + +## +## Visual studio for Mac 
+## + + +# globs +Makefile.in +*.userprefs +*.usertasks +config.make +config.status +aclocal.m4 +install-sh +autom4te.cache/ +*.tar.gz +tarballs/ +test-results/ + +# Mac bundle stuff +*.dmg +*.app + +# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# Vim temporary swap files +*.swp diff --git a/PatchSync.CLI/CLI/CLIContext.cs b/PatchSync.CLI/CLI/CLIContext.cs new file mode 100644 index 0000000..4cae5c0 --- /dev/null +++ b/PatchSync.CLI/CLI/CLIContext.cs @@ -0,0 +1,21 @@ +namespace PatchSync.CLI; + +public class CLIContext +{ + private readonly Dictionary _context = new(); + + public bool TryGetProperty(string key, out T? t) + { + if (_context.TryGetValue(key, out var value) && value is T tValue) + { + t = tValue; + return true; + } + + t = default; + return false; + } + + public void SetProperty(string key, T? 
value) => + _context[key] = value!; +} diff --git a/PatchSync.CLI/CLI/Text.cs b/PatchSync.CLI/CLI/Text.cs new file mode 100644 index 0000000..4a80686 --- /dev/null +++ b/PatchSync.CLI/CLI/Text.cs @@ -0,0 +1,17 @@ +using Spectre.Console; + +namespace PatchSync.CLI; + +public static class CLIText +{ + public static void WriteFilePathText(string path) + { + var manifestTextPath = new TextPath(Path.GetFullPath(path)); + manifestTextPath.StemStyle = new Style(Color.Blue); + + var isDirectory = File.GetAttributes(path).HasFlag(FileAttributes.Directory); + manifestTextPath.LeafStyle = new Style(isDirectory ? Color.Blue : Color.Green); + + AnsiConsole.Write(manifestTextPath); + } +} diff --git a/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.Prompts.cs b/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.Prompts.cs new file mode 100644 index 0000000..f7f20fa --- /dev/null +++ b/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.Prompts.cs @@ -0,0 +1,39 @@ +using PatchSync.Common.Manifest; +using Spectre.Console; + +namespace PatchSync.CLI.Commands; + +public partial class BuildSignatures +{ + private static string GetInputFolder() => + new FileBrowser + { + Title = "Please choose the folder to build [green]signatures[/]", + PromptMessage = "Please choose the folder to build [green]signatures[/]" + } + .SelectDirectory() + .GetPath(); + + private static string GetOutputFolder(string workingDirectory) => + new FileBrowser + { + Title = "Please choose the folder to save the [green]signatures[/]", + PromptMessage = "Please choose the folder to save the [green]signatures[/]", + WorkingDirectory = workingDirectory + } + .SelectDirectory() + .GetPath(); + + private static PatchChannel GetChannel() + { + var result = AnsiConsole.Prompt( + new SelectionPrompt() + .Title("Select the [green]patch channel[/]:") + .AddChoices(Enum.GetValues()) + ); + + AnsiConsole.MarkupLineInterpolated($"Select the [green]patch channel[/]: [blue]{result}[/]"); + + return result; + } 
+} diff --git a/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.cs b/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.cs new file mode 100644 index 0000000..6c4057d --- /dev/null +++ b/PatchSync.CLI/Commands/Build Signatures/BuildSignatures.cs @@ -0,0 +1,171 @@ +using System.Collections.Concurrent; +using System.Security.Cryptography; +using System.Text.Json; +using PatchSync.CLI.Json; +using PatchSync.CLI.Threading; +using PatchSync.Common.Manifest; +using PatchSync.Common.Text; +using PatchSync.Manifest; +using PatchSync.Signatures; +using Spectre.Console; + +namespace PatchSync.CLI.Commands; + +public partial class BuildSignatures : ICommand +{ + private string? _baseFolder; + private ProgressContext? _ctx; + private ConcurrentQueue? _manifestFileEntries; + private string? _outputFolder; + private string? _signatureOutputFolder; + + public string Name => "Build Signatures"; + + public void ExecuteCommand(CLIContext cliContext) + { + _baseFolder = GetInputFolder(); + _outputFolder = GetOutputFolder(_baseFolder); + var channel = GetChannel(); + + _outputFolder = Path.Combine(_outputFolder, channel.ToString().ToLower()); + + AnsiConsole.Write(new Rule("[green3]Building Signatures[/]")); + + var files = Directory.GetFiles(_baseFolder, "*", SearchOption.AllDirectories) + .Where( + file => + { + var fi = new FileInfo(file); + + return fi.Extension != ".sig" && fi.Name != "manifest.json"; + } + ) + .ToArray(); + + Array.Sort( + files, + (a, b) => + { + var af = new FileInfo(a); + var bf = new FileInfo(b); + + // Smallest files first + var result = (int)(af.Length - bf.Length); + + // Then alphabetical + return result == 0 ? 
string.Compare(bf.Name, af.Name, StringComparison.Ordinal) : result; + } + ); + + _manifestFileEntries = new ConcurrentQueue(); + + var now = DateTime.UtcNow; + _signatureOutputFolder = Path.Combine(_outputFolder, now.ToString("yyyy-MM-dd-HH-mm-ss")); + + AnsiConsole.Progress() + .Columns( + new TaskDescriptionColumn(), + new ProgressBarColumn(), + new PercentageColumn(), + new SpinnerColumn(), + new DownloadedColumn() + ) + .HideCompleted(true) + .AutoClear(true) + .Start( + ctx => + { + _ctx = ctx; + + foreach (var file in files) + { + var relativeFilePath = Path.GetRelativePath(_baseFolder, file); + var signatureFilePath = Path.Combine(_signatureOutputFolder, relativeFilePath); + Directory.CreateDirectory(Path.GetDirectoryName(signatureFilePath)!); + } + + ThreadWorker.MapParallel(files, DoWork); + } + ); + + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLineInterpolated($"Signatures saved to [green]{Path.GetFullPath(_signatureOutputFolder)}[/]"); + AnsiConsole.WriteLine(); + + AnsiConsole.Write(new Rule("[green3]Building Manifest[/]")); + + var sortedFiles = _manifestFileEntries.ToArray(); + Array.Sort( + sortedFiles, + (a, b) => + StringComparer.OrdinalIgnoreCase.Compare(a.FilePath, b.FilePath) + ); + + var manifestPath = Path.Combine(_outputFolder, "manifest.json"); + var patchManifest = ManifestBuilder.GenerateManifest( + channel.ToString().ToLower(), + sortedFiles, + now + ); + + using var stream = File.Open(manifestPath, FileMode.Create); + + JsonSerializer.Serialize(stream, patchManifest, JsonSourceGenerationContext.Default.PatchManifest); + + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLineInterpolated($"Signatures saved to [green]{manifestPath}[/]"); + AnsiConsole.WriteLine(); + + cliContext.SetProperty(nameof(PatchManifest), (patchManifest, manifestPath)); + + AnsiConsole.Write(new Rule("[green3]Finished[/]")); + } + + private void DoWork( + string file + ) + { + var fi = new FileInfo(file); + var fileSize = fi.Length; + var task = 
_ctx!.AddTask($"[green]{Path.GetFileName(file)}[/]"); + var relativeFilePath = Path.GetRelativePath(_baseFolder!, file); + + // Generate Signature + if (fileSize >= 1024) + { + var signatureFilePath = Path.Combine(_signatureOutputFolder!, $"{relativeFilePath}.sig"); + var chunkSize = ManifestFileEntry.GetChunkSize(fileSize); + var (fastHash, fullHash) = SignatureGenerator.GenerateSignature(file, signatureFilePath, chunkSize, task); + var manifestEntry = new ManifestFileEntry( + ManifestFileCommand.DeltaUpdate, + relativeFilePath, + fi.Length + ) + { + ChunkSize = chunkSize, + FastHash = fastHash.ToString(), + Hash = fullHash.ToHexString() + }; + + _manifestFileEntries!.Enqueue(manifestEntry); + } + else + { + task.MaxValue(fi.Length).StartTask(); + Span buffer = stackalloc byte[32]; + using var stream = File.Open(file, FileMode.Open, FileAccess.Read); + SHA256.HashData(stream, buffer); + var manifestEntry = new ManifestFileEntry( + ManifestFileCommand.UpdateIfFullHashMismatch, + relativeFilePath, + fi.Length + ) + { + Hash = buffer.ToHexString() + }; + _manifestFileEntries!.Enqueue(manifestEntry); + task.Increment(fi.Length); + task.StopTask(); + } + } +} diff --git a/PatchSync.CLI/Commands/CommandHandler.cs b/PatchSync.CLI/Commands/CommandHandler.cs new file mode 100644 index 0000000..14bb43d --- /dev/null +++ b/PatchSync.CLI/Commands/CommandHandler.cs @@ -0,0 +1,23 @@ +using Spectre.Console; + +namespace PatchSync.CLI.Commands; + +public static class CommandHandler +{ + private static readonly List _commands = new(); + + public static void Register(ICommand command) + { + _commands.Add(command); + } + + public static ICommand PromptCommands() + { + return AnsiConsole.Prompt( + new SelectionPrompt() + .Title("What would you like to do?") + .AddChoices(_commands) + .UseConverter(command => command.Name) + ); + } +} diff --git a/PatchSync.CLI/Commands/ICommand.cs b/PatchSync.CLI/Commands/ICommand.cs new file mode 100644 index 0000000..ec6f03f --- /dev/null +++ 
b/PatchSync.CLI/Commands/ICommand.cs @@ -0,0 +1,7 @@ +namespace PatchSync.CLI.Commands; + +public interface ICommand +{ + string Name { get; } + void ExecuteCommand(CLIContext cliContext); +} diff --git a/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.Prompts.cs b/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.Prompts.cs new file mode 100644 index 0000000..0fd3e76 --- /dev/null +++ b/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.Prompts.cs @@ -0,0 +1,32 @@ +using Spectre.Console; + +namespace PatchSync.CLI.Commands.Patch_Installation; + +public partial class PatchInstallation +{ + private static string GetInputFolder() => + new FileBrowser + { + Title = "Please choose the folder to patch [green]files[/]", + PromptMessage = "Please choose the folder to patch [green]files[/]" + } + .SelectDirectory() + .GetPath(); + + private static string GetManifestFile() => + new FileBrowser + { + Title = "Please select the manifest file [green]manifest[/]", + SearchPattern = "manifest.json" + } + .SelectFile() + .GetPath(); + + private static bool PromptReadyToPatch() => + AnsiConsole.Prompt( + new ConfirmationPrompt("Press yes to start [green]patching[/]:") + { + DefaultValue = true + } + ); +} diff --git a/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.cs b/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.cs new file mode 100644 index 0000000..4b8844a --- /dev/null +++ b/PatchSync.CLI/Commands/Patch Installation/PatchInstallation.cs @@ -0,0 +1,356 @@ +using System.IO.Hashing; +using System.IO.MemoryMappedFiles; +using System.Security.Cryptography; +using System.Text.Json; +using PatchSync.CLI.Json; +using PatchSync.Common.Manifest; +using PatchSync.Common.Signatures; +using PatchSync.Common.Text; +using PatchSync.SDK; +using PatchSync.SDK.Signatures; +using Spectre.Console; + +namespace PatchSync.CLI.Commands.Patch_Installation; + +public partial class PatchInstallation : ICommand +{ + public string Name => "Patch 
Installation"; + + public void ExecuteCommand(CLIContext cliContext) + { + var manifestFilePath = GetManifestFile(); + var manifestFileInfo = new FileInfo(manifestFilePath); + + PatchManifest? manifest; + using (var manifestStream = File.Open(manifestFilePath, FileMode.Open, FileAccess.Read)) + { + manifest = JsonSerializer.Deserialize( + manifestStream, + JsonSourceGenerationContext.Default.PatchManifest + ); + } + + if (manifest == null) + { + throw new Exception("Failed to deserialize manifest.json"); + } + + var installationPath = GetInputFolder(); + + var filesInInstallation = Directory + .EnumerateFiles(installationPath, "*", SearchOption.AllDirectories) + .Select(filePath => Path.GetRelativePath(installationPath, filePath)) + .ToHashSet(); + + var patchFolder = Path.Combine(manifestFileInfo.DirectoryName, manifest.Date.ToString("yyyy-MM-dd-HH-mm-ss")); + + var hasher = new XxHash3(); + + var commandsToExecute = manifest.Files.Where( + entry => + { + if (!File.Exists(Path.Combine(patchFolder, $"{entry.FilePath}.sig"))) + { + throw new Exception($"Missing patch file from manifest: {entry.FilePath}"); + } + + if (entry.Command is ManifestFileCommand.AlwaysFullUpdate) + { + return true; + } + + // File is missing + if (!filesInInstallation.Contains(entry.FilePath)) + { + return entry.Command is not ManifestFileCommand.Delete; + } + + if (entry.Command is ManifestFileCommand.UpdateIfMissing) + { + return false; + } + + var fullPath = Path.Combine(installationPath, entry.FilePath); + + // No hash indicates a full update + if (string.IsNullOrWhiteSpace(entry.Hash)) + { + return true; + } + + if (entry.Command is ManifestFileCommand.DeltaUpdate) + { + using var file = File.Open(fullPath, FileMode.Open, FileAccess.Read); + hasher.Append(file); + var existingHash = hasher.GetCurrentHashAsUInt64().ToString(); + hasher.Reset(); + + if (existingHash != entry.FastHash) + { + return true; + } + } + else if (entry.Command is ManifestFileCommand.Delete) + { + return true; + 
} + + if (entry.Command is ManifestFileCommand.UpdateIfFullHashMismatch) + { + using var stream = File.Open(fullPath, FileMode.Open, FileAccess.Read); + Span fullHashBuffer = stackalloc byte[32]; + SHA256.HashData(stream, fullHashBuffer); + + var hash = fullHashBuffer.ToHexString(); + if (hash != entry.Hash) + { + return true; + } + } + + return false; + } + ).ToList(); + + if (commandsToExecute.Count == 0) + { + AnsiConsole.MarkupLine("Installation is [green]up to date[/]."); + AnsiConsole.WriteLine(); + return; + } + + var table = new Table(); + table.AddColumn("File"); + table.AddColumn("File"); + table.Border(TableBorder.Heavy); + table.Collapse(); + + var manifestFiles = commandsToExecute.OrderBy(entry => entry.FilePath, StringComparer.OrdinalIgnoreCase).ToArray(); + + var length = manifestFiles.Length; + var halfLength = length / 2; + for (var i = 0; i < halfLength; i++) + { + var manifestFile = manifestFiles[i]; + var markup = Markup.FromInterpolated($"{manifestFile.Command.GetIcon()} {manifestFile.FilePath}"); + + if (halfLength + i >= length) + { + table.AddRow(markup); + continue; + } + + manifestFile = manifestFiles[halfLength + i]; + table.AddRow( + markup, + Markup.FromInterpolated($"{manifestFile.Command.GetIcon()} {manifestFile.FilePath}") + ); + } + + AnsiConsole.Write(table); + + if (!PromptReadyToPatch()) + { + return; + } + + var tempFolder = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + Directory.CreateDirectory(tempFolder); + + Span buffer = stackalloc byte[32]; + + foreach (var command in commandsToExecute) + { + var file = Path.Combine(installationPath, command.FilePath); + var remoteFile = Path.Combine(patchFolder, command.FilePath); + + if (!filesInInstallation.Contains(command.FilePath)) + { + if (command.Command is not ManifestFileCommand.Delete and not ManifestFileCommand.NeverUpdate) + { + File.Copy(remoteFile, file); + } + continue; + } + + switch (command.Command) + { + case ManifestFileCommand.Delete: + { + 
File.Delete(file); + break; + } + case ManifestFileCommand.AlwaysFullUpdate: + { + File.Delete(file); + File.Copy(remoteFile, file); + break; + } + case ManifestFileCommand.UpdateIfFullHashMismatch: + { + var fi = new FileInfo(file); + + var hasChanges = command.FileSize != fi.Length; + if (!hasChanges) + { + using var stream = File.Open(file, FileMode.Open, FileAccess.Read); + SHA256.HashData(stream, buffer); + hasChanges = command.Hash != buffer.ToHexString(); + } + + if (hasChanges) + { + File.Delete(file); + File.Copy(remoteFile, file); + } + + break; + } + case ManifestFileCommand.DeltaUpdate: + { + // May not be the same (different remote path, or URL) + var signatureFilePath = Path.Combine(patchFolder, $"{command.FilePath}.sig"); + + if (!DoLocalDeltaPatching(command, signatureFilePath, file, remoteFile, tempFolder, command.ChunkSize)) + { + File.Delete(file); + File.Copy(remoteFile, file); + } + + break; + } + } + } + } + + private static bool DoLocalDeltaPatching( + ManifestFileEntry command, + string signatureFilePath, + string installFilePath, + string remoteFilePath, + string tempFolderPath, + int chunkSize + ) + { + SignatureFile sigFile; + using (var sigFileStream = File.Open(signatureFilePath, FileMode.Open, FileAccess.Read)) + { + sigFile = SignatureFileHandler.LoadSignature(command.FileSize, sigFileStream, chunkSize); + } + + var tempFilePath = Path.Combine(tempFolderPath, command.FilePath); + var hasher = SHA256.Create(); + + using (var mmf = MemoryMappedFile.CreateFromFile(installFilePath, FileMode.Open)) + { + using var installationFileStream = mmf.CreateViewStream(); + var deltas = FilePatcher.GetPatchDeltas(installationFileStream, sigFile); + + var anyLocal = false; + var anyRemote = false; + var anyLocalOutOfOrder = false; + var remoteThreshold = false; + + // TODO: This should be calculated and added to the manifest. Also should include a link to a pre-zipped version of the file. 
+ var threshold = (long)(command.FileSize / 1.5); // 2/3rds are changes + var remoteTotal = 0L; + + // We do not handle remaining data, since it is available in the sig file, and tacked onto the end. + for (var i = 0; !(anyLocal && anyRemote || remoteThreshold) && i < deltas.Length; i++) + { + var delta = deltas[i]; + if (delta.Location is PatchSliceLocation.ExistingSlice) + { + anyLocal = true; + if (!anyLocalOutOfOrder && delta.Offset != chunkSize * i) + { + anyLocalOutOfOrder = true; + } + } + + if (delta.Location is PatchSliceLocation.RemoteSlice) + { + anyRemote = true; + remoteTotal += chunkSize; + if (remoteTotal >= threshold) + { + remoteThreshold = true; + } + } + } + + // We should just download the entire file + if (remoteThreshold) + { + return false; + } + + // If we don't have any remote slices, and no out of order local slices, then check if the file is the same. + // Check length, then SHA256 hash. + if (!anyRemote && !anyLocalOutOfOrder && installationFileStream.Length == command.FileSize) + { + Span buffer = stackalloc byte[32]; + installationFileStream.Seek(0, SeekOrigin.Begin); + SHA256.HashData(installationFileStream, buffer); + return command.Hash != buffer.ToHexString(); + } + + // Generally would be piece-wise downloads from the web server + using var remoteMmf = MemoryMappedFile.CreateFromFile(remoteFilePath, FileMode.Open); + using var remoteFileStream = remoteMmf.CreateViewStream(); + + // Build a temporary file + + Directory.CreateDirectory(Path.GetDirectoryName(tempFilePath)); // For nested paths + + using (var tempFile = File.Create(tempFilePath)) + { + var chunk = new byte[chunkSize]; + foreach (var delta in deltas) + { + var sourceStream = delta.Location is PatchSliceLocation.ExistingSlice + ? 
installationFileStream + : remoteFileStream; + + sourceStream.Seek(delta.Offset, SeekOrigin.Begin); + + var bytesRead = 0; + while (bytesRead < chunkSize) + { + int read = sourceStream.Read(chunk, bytesRead, chunkSize - bytesRead); + if (read == 0) + { + throw new IOException("Prematurely reached end of file."); + } + + bytesRead += read; + } + + tempFile.Write(chunk); + hasher.TransformBlock(chunk, 0, chunkSize, null, 0); + } + + if (sigFile.RemainingData.Length > 0) + { + tempFile.Write(sigFile.RemainingData); + hasher.TransformFinalBlock(sigFile.RemainingData, 0, sigFile.RemainingData.Length); + } + else + { + hasher.TransformFinalBlock(chunk, 0, 0); + } + } + } + + if (command.Hash == hasher.Hash.ToHexString()) + { + File.Delete(installFilePath); + File.Move(tempFilePath, installFilePath); + Directory.Delete(tempFolderPath, true); + return true; + } + + Directory.Delete(tempFolderPath, true); + return false; + } +} diff --git a/PatchSync.CLI/Commands/QuitCLI.cs b/PatchSync.CLI/Commands/QuitCLI.cs new file mode 100644 index 0000000..5bd2bf2 --- /dev/null +++ b/PatchSync.CLI/Commands/QuitCLI.cs @@ -0,0 +1,11 @@ +namespace PatchSync.CLI.Commands; + +public class QuitCLI : ICommand +{ + public string Name => "Quit"; + + public void ExecuteCommand(CLIContext cliContext) + { + cliContext.SetProperty("exit", true); + } +} diff --git a/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.Prompts.cs b/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.Prompts.cs new file mode 100644 index 0000000..36d4ed5 --- /dev/null +++ b/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.Prompts.cs @@ -0,0 +1,293 @@ +using Amazon.Runtime; +using Spectre.Console; + +namespace PatchSync.CLI.Commands; + +public partial class UploadSignatures +{ + private static bool PromptExistingManifest() => + AnsiConsole.Prompt( + new ConfirmationPrompt("Use newly created patch manifest at [green]patch manifest[/]:") + { + DefaultValue = true + } + ); + + private static string 
GetManifestFile() => + new FileBrowser + { + Title = "Please choose the [green]manifest.json[/] file", + SearchPattern = "manifest.json" + } + .SelectFile() + .GetPath(); + + private static UploadProvider GetUploadProvider() + { + var value = AnsiConsole.Prompt( + new SelectionPrompt() + .Title("Select the [green]hosting provider[/]:") + .AddChoices(Enum.GetValues()) + ); + + AnsiConsole.MarkupLineInterpolated($"Select the [green]hosting provider[/]: [blue]{value}[/]"); + return value; + } + + private static string GetCloudFlareAccountId() + { + var accountId = Environment.GetEnvironmentVariable("API_ACCOUNT_ID"); + if (accountId != null) + { + AnsiConsole.MarkupLineInterpolated($"Cloudflare Account ID: [blue]{accountId}[/]"); + return accountId; + } + + return AnsiConsole.Prompt( + new TextPrompt("Please specify the [green]CloudFlare Account ID[/]:") + .PromptStyle("blue") + .Validate( + accountId => + { + if (string.IsNullOrWhiteSpace(accountId)) + { + return ValidationResult.Error("[red]You must specify a valid account id.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + } + + private static string GetBackBlazeRegion() + { + var endpointOrRegion = AnsiConsole.Prompt( + new TextPrompt("Please specify the Backblaze [green]endpoint[/] or [green]region[/]:") + .DefaultValue("us-west-000") + .ShowDefaultValue(true) + .PromptStyle("blue") + .Validate( + endpointUrl => + { + // s3.us-west-000.backblazeb2.com + if (string.IsNullOrWhiteSpace(endpointUrl)) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + if (endpointUrl.EndsWith("backblazeb2.com")) + { + var endpointUri = new Uri(endpointUrl); + if (!endpointUri.Host.StartsWith("s3")) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + } + else if (endpointUrl.Split('-').Length != 3) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + return 
ValidationResult.Success(); + } + ) + ); + + return endpointOrRegion.EndsWith("backblazeb2.com") + ? new Uri(endpointOrRegion).Host.Split('.')[1] + : endpointOrRegion; + } + + private static string GetDigitalOceanRegion() + { + var endpointOrRegion = AnsiConsole.Prompt( + new TextPrompt("Please specify the Digital Ocean [green]endpoint[/] or [green]region[/]:") + .DefaultValue("nyc3") + .ShowDefaultValue(true) + .PromptStyle("blue") + .Validate( + endpointUrl => + { + // nyc3.digitaloceanspaces.com + if (string.IsNullOrWhiteSpace(endpointUrl)) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + if (!endpointUrl.EndsWith("digitaloceanspaces.com")) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + + return endpointOrRegion.EndsWith("digitaloceanspaces.com") + ? new Uri(endpointOrRegion).Host.Split('.')[0] + : endpointOrRegion; + } + + private static string GetLinodeRegion() + { + var endpointOrRegion = AnsiConsole.Prompt( + new TextPrompt("Please specify the Linode [green]endpoint[/] or [green]region[/]:") + .DefaultValue("us-east-1") + .ShowDefaultValue(true) + .PromptStyle("blue") + .Validate( + endpointUrl => + { + // us-east-1.linodeobjects.com + if (string.IsNullOrWhiteSpace(endpointUrl)) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + if (!endpointUrl.EndsWith("linodeobjects.com")) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + + return endpointOrRegion.EndsWith("linodeobjects.com") + ? 
new Uri(endpointOrRegion).Host.Split('.')[0] + : endpointOrRegion; + } + + private static string GetGenericEndpoint() + { + var endpointOrRegion = AnsiConsole.Prompt( + new TextPrompt("Please specify the S3 compatible storage provider [green]endpoint[/]:") + .PromptStyle("blue") + .Validate( + endpointUrl => + { + if (string.IsNullOrWhiteSpace(endpointUrl)) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + if (!Uri.TryCreate(endpointUrl, UriKind.Absolute, out _)) + { + return ValidationResult.Error("[red]You must specify a valid endpoint url or region.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + + return endpointOrRegion.EndsWith("linodeobjects.com") + ? new Uri(endpointOrRegion).Host.Split('.')[0] + : endpointOrRegion; + } + + private static string GetBucket() + { + return AnsiConsole.Prompt( + new TextPrompt("Please specify the [green]bucket[/]:") + .PromptStyle("blue") + .Validate( + bucket => + { + if (string.IsNullOrWhiteSpace(bucket)) + { + return ValidationResult.Error("[red]You must specify a valid bucket.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + } + + private static string GetBasePath() + { + return AnsiConsole.Prompt( + new TextPrompt("Please specify the [green]base path[/]:") + .PromptStyle("blue") + .Validate( + path => + { + if (string.IsNullOrWhiteSpace(path)) + { + return ValidationResult.Error("[red]You must specify a valid path.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + } + + private static bool PromptLooksCorrect(string? 
endpointUrl, string bucket, string path) + { + AnsiConsole.WriteLine(); + if (endpointUrl != null) + { + AnsiConsole.MarkupLineInterpolated($"Endpoint set to: [green]{endpointUrl}[/]"); + } + + Uri.TryCreate(new Uri($"s3://{bucket}"), path, out var s3Path); + + AnsiConsole.MarkupLineInterpolated($"Path to upload set: [green]{s3Path}[/]"); + return AnsiConsole.Prompt( + new ConfirmationPrompt("Does everything look correct?") + { + DefaultValue = true + } + ); + } + + private static BasicAWSCredentials PromptMissingCredentials() + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine("[red]Error:[/] Could not find S3-compatible credentials."); + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine("To store credentials securely, consult the AWS CLI credentials documentation:"); + AnsiConsole.MarkupLine("- [green]https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html[/]"); + AnsiConsole.WriteLine(); + + var accessKey = AnsiConsole.Prompt( + new TextPrompt("Please specify the [green]access key[/]:") + .PromptStyle("blue") + .Validate( + path => + { + if (string.IsNullOrWhiteSpace(path)) + { + return ValidationResult.Error("[red]You must specify a valid access key.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + + var secretKey = AnsiConsole.Prompt( + new TextPrompt("Please specify the [green]secret key[/] (not stored):") + .PromptStyle("darkorange3") + .Secret() + .Validate( + path => + { + if (string.IsNullOrWhiteSpace(path)) + { + return ValidationResult.Error("[red]You must specify a valid secret key.[/]"); + } + + return ValidationResult.Success(); + } + ) + ); + + return new BasicAWSCredentials(accessKey, secretKey); + } +} diff --git a/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.cs b/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.cs new file mode 100644 index 0000000..9201727 --- /dev/null +++ b/PatchSync.CLI/Commands/Upload Signatures/UploadSignatures.cs @@ -0,0 +1,111 @@ +using System.Text.Json; +using 
public partial class UploadSignatures : ICommand
{
    public string Name => "Upload Signatures";

    /// <summary>
    /// Resolves S3-compatible credentials, loads (or reuses) the patch manifest,
    /// shows its files, then prompts for a hosting provider/bucket/path and builds
    /// the S3 client for the upload.
    /// </summary>
    public void ExecuteCommand(CLIContext cliContext)
    {
        AWSCredentials credentials;
        try
        {
            AnsiConsole.MarkupLine("[green]Attempting to retrieve S3-compatible credentials...[/]");
            credentials = FallbackCredentialsFactory.GetCredentials();
            _ = credentials.GetCredentials(); // Will throw if no credentials are configured
        }
        catch
        {
            credentials = PromptMissingCredentials();
        }

        AnsiConsole.WriteLine();

        // Reuse a manifest produced earlier in this CLI session, when available.
        PatchManifest? patchManifest = null;
        string? manifestPath = null;
        if (cliContext.TryGetProperty<(PatchManifest, string)>(nameof(PatchManifest), out var manifestTuple))
        {
            patchManifest = manifestTuple.Item1;
            manifestPath = manifestTuple.Item2;
        }

        // Cannot find existing, or we don't want to use it
        if (patchManifest == null || manifestPath == null || !PromptExistingManifest())
        {
            manifestPath = GetManifestFile();
            using var openManifestFile = File.Open(manifestPath, FileMode.Open);
            patchManifest = JsonSerializer.Deserialize(
                openManifestFile,
                JsonSourceGenerationContext.Default.PatchManifest
            ) ?? throw new Exception("Failed to json deserialize manifest file");
        }

        AnsiConsole.MarkupLineInterpolated($"[green]Manifest File[/] - {manifestPath}");

        // Render manifest files as a two-column table (both columns hold file names).
        var table = new Table();
        table.AddColumn("File");
        table.AddColumn("File");
        table.Border(TableBorder.Heavy);
        table.Collapse();

        var manifestFiles = patchManifest.Files.OrderBy(entry => entry.FilePath, StringComparer.OrdinalIgnoreCase).ToArray();

        var length = manifestFiles.Length;
        // FIX: round up. With `length / 2` an odd-length list never rendered its last
        // entry (the `halfLength + i >= length` guard below was unreachable).
        var halfLength = (length + 1) / 2;
        for (var i = 0; i < halfLength; i++)
        {
            var manifestFile = manifestFiles[i];
            var markup = Markup.FromInterpolated($"{manifestFile.Command.GetIcon()} {manifestFile.FilePath}");

            // Odd-length lists: the last row only has a left-hand entry.
            if (halfLength + i >= length)
            {
                table.AddRow(markup);
                continue;
            }

            manifestFile = manifestFiles[halfLength + i];
            table.AddRow(
                markup,
                Markup.FromInterpolated($"{manifestFile.Command.GetIcon()} {manifestFile.FilePath}")
            );
        }

        AnsiConsole.Write(table);

        AnsiConsole.WriteLine();

        var endpointFromEnvironment = Environment.GetEnvironmentVariable("AWS_ENDPOINT_URL");

        bool agreed;
        string? endpointUrl;
        do
        {
            AnsiConsole.Clear();

            // Use the endpoint from AWS environment variables, otherwise prompt
            endpointUrl = endpointFromEnvironment ?? GetUploadProvider() switch
            {
                UploadProvider.BackBlaze => $"https://s3.{GetBackBlazeRegion()}.backblazeb2.com",
                UploadProvider.CloudFlare => $"https://{GetCloudFlareAccountId()}.r2.cloudflarestorage.com",
                UploadProvider.DigitalOcean => $"https://{GetDigitalOceanRegion()}.digitaloceanspaces.com",
                UploadProvider.Google => "https://storage.googleapis.com",
                UploadProvider.Linode => $"https://{GetLinodeRegion()}.linodeobjects.com",
                UploadProvider.Other => GetGenericEndpoint(),
                _ => null
            };

            var bucket = GetBucket();
            var path = GetBasePath();
            agreed = PromptLooksCorrect(endpointUrl, bucket, path);
        } while (!agreed);

        // NOTE(review): the client is built but no upload is performed yet and it is
        // not disposed — presumably the upload step follows in a later change; confirm.
        var s3Client = endpointUrl == null
            ? new AmazonS3Client(credentials)
            : new AmazonS3Client(credentials, new AmazonS3Config { ServiceURL = endpointUrl });
    }
}

[JsonSourceGenerationOptions(UseStringEnumConverter = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(PatchManifest))]
internal partial class JsonSourceGenerationContext : JsonSerializerContext
{
}

public static class ManifestBuilder
{
    /// <summary>Builds a manifest for the given channel, file entries, and timestamp.</summary>
    public static PatchManifest GenerateManifest(string channel, ManifestFileEntry[] fileEntries, DateTime date) =>
        new()
        {
            Channel = channel,
            Date = date,
            Files = fileEntries
        };
}
public static class SignatureGenerator
{
    /// <summary>
    /// Memory-maps <paramref name="filePath"/> and writes its chunk signature to
    /// <paramref name="signaturePath"/>, reporting progress to the optional
    /// <paramref name="task"/>. Returns the (fast hash, full hash) pair from the
    /// signature handler.
    /// </summary>
    public static (ulong, byte[]) GenerateSignature(string filePath, string signaturePath, int chunkSize, ProgressTask? task)
    {
        var fi = new FileInfo(filePath);
        var fileSize = fi.Length;

        using var mmf = MemoryMappedFile.CreateFromFile(
            filePath,
            FileMode.Open,
            null,
            fileSize,
            MemoryMappedFileAccess.Read
        );
        using var mmStream = mmf.CreateViewStream(0, fileSize, MemoryMappedFileAccess.Read);
        using var fileStream = File.Open(signaturePath, FileMode.Create);

        return SignatureFileHandler.CreateSignatureFile(
            mmStream,
            fileStream,
            chunkSize,
            signatureResult =>
            {
                // FIX: task is nullable — the Started case dereferenced it unguarded
                // while the other cases used the null-forgiving operator. Use
                // null-conditional access consistently so a null task means "no progress UI".
                switch (signatureResult.ProcessingStatus)
                {
                    case FileProcessingStatus.Started:
                    {
                        task?.MaxValue(fileSize).StartTask();
                        break;
                    }
                    case FileProcessingStatus.InProgress:
                    {
                        task?.Increment(signatureResult.BytesProcessed);
                        break;
                    }
                    case FileProcessingStatus.Completed:
                    {
                        task?.StopTask();
                        break;
                    }
                    default:
                    {
                        throw new ArgumentOutOfRangeException(nameof(signatureResult));
                    }
                }
            }
        );
    }
}
-0,0 +1,174 @@ +using System.Runtime.InteropServices; +using Spectre.Console; + +namespace PatchSync.CLI; + +public class FileBrowser +{ + private const int PageSize = 15; + private const string LevelUpText = ".."; + private const string CurrentFolder = "Current Folder"; + private const string MoreChoicesText = "Use up and down arrows to select"; + private const string SelectFileText = "[b][green]Select File[/][/]"; + private const string SelectFolderText = "[b][green]Select Folder[/][/]"; + private const string SelectDriveText = "Change Drives"; + private const string SelectActualText = "Select Folder"; + + private static readonly string UserProfilePath = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); + private static readonly bool IsWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + + private bool _selectFile; + + public bool DisplayIcons { get; set; } = true; + public string WorkingDirectory { get; set; } = UserProfilePath; + public string? Title { get; set; } + + public string SearchPattern { get; set; } = "*"; + + public string? 
PromptMessage { get; set; } + + public FileBrowser SelectFile() + { + _selectFile = true; + return this; + } + + public FileBrowser SelectDirectory() + { + _selectFile = false; + return this; + } + + public string GetPath() + { + var selectedPath = InternalGetPath(); + if (!string.IsNullOrEmpty(PromptMessage)) + { + // Can't use interpolated cause it doesn't cascade and we may have markup in the prompt message + AnsiConsole.MarkupLine($"{PromptMessage}: [blue]{selectedPath}[/]"); + } + + return selectedPath; + } + + private string InternalGetPath() + { + var actualFolder = WorkingDirectory; + + var lastFolder = actualFolder; + while (true) + { + string[] directoriesInFolder; + Directory.SetCurrentDirectory(actualFolder); + + var folders = new Dictionary(); + + try + { + directoriesInFolder = Directory.GetDirectories(Directory.GetCurrentDirectory()); + lastFolder = actualFolder; + } + catch + { + actualFolder = actualFolder == lastFolder ? UserProfilePath : lastFolder; + Directory.SetCurrentDirectory(actualFolder); + directoriesInFolder = Directory.GetDirectories(Directory.GetCurrentDirectory()); + } + + if (IsWindows) + { + folders.Add( + DisplayIcons ? $":computer_disk: [green]{SelectDriveText}[/]" : $"[green]{SelectDriveText}[/]", + "/////" + ); + } + + try + { + if (new DirectoryInfo(actualFolder).Parent != null) + { + folders.Add( + DisplayIcons ? $":upwards_button: [green]{LevelUpText}[/]" : $"[green]{LevelUpText}[/]", + new DirectoryInfo(actualFolder).Parent?.FullName! + ); + } + } + catch + { + } + + if (!_selectFile) + { + folders.Add( + DisplayIcons ? $":ok_button: [green]{SelectActualText}[/]" : $"[green]{SelectActualText}[/]", + Directory.GetCurrentDirectory() + ); + } + + foreach (var d in directoriesInFolder) + { + var folderName = d[(actualFolder.Length + (new DirectoryInfo(actualFolder).Parent != null ? 1 : 0))..]; + folders.Add(DisplayIcons ? 
$":file_folder: {folderName}" : folderName, d); + } + + if (_selectFile) + { + foreach (var file in Directory.EnumerateFiles(actualFolder, SearchPattern)) + { + var result = Path.GetFileName(file); + folders.Add(DisplayIcons ? $":page_facing_up: {result}" : result, file); + } + } + + var title = Title ?? (_selectFile ? SelectFileText : SelectFolderText); + + var selected = AnsiConsole.Prompt( + new SelectionPrompt() + .Title($"{title}: ({CurrentFolder}: [orange3]{actualFolder}[/])") + .PageSize(PageSize) + .MoreChoicesText($"[grey]{MoreChoicesText}[/]") + .AddChoices(folders.Keys) + ); + + lastFolder = actualFolder; + var record = folders[selected]; + + if (record == "/////") + { + record = SelectDrive(); + actualFolder = record; + } + + if (record == Directory.GetCurrentDirectory()) + { + return actualFolder; + } + + try + { + if (Directory.Exists(record)) + { + actualFolder = record; + } + else if (File.Exists(record)) + { + return record; + } + } + catch + { + AnsiConsole.WriteLine("[red]You have no access to this folder[/]"); + } + } + } + + private string SelectDrive() => + AnsiConsole.Prompt( + new SelectionPrompt() + .Title($"[green]{SelectDriveText}:[/]") + .PageSize(PageSize) + .MoreChoicesText($"[grey]{MoreChoicesText}[/]") + .AddChoices(Directory.GetLogicalDrives()) + .UseConverter(drive => DisplayIcons ? 
/// <summary>
/// A dedicated worker thread that drains a queue of <typeparamref name="T"/> items
/// through a fixed action. The owning thread controls it with Wake/Sleep/Exit.
/// </summary>
public class ThreadWorker<T>
{
    private readonly Action<T> _action;
    private readonly ConcurrentQueue<T> _entities;
    private readonly AutoResetEvent _startEvent; // Main thread tells the thread to start working
    private readonly AutoResetEvent _stopEvent; // Main thread waits for the worker finish draining

    private readonly Thread _thread;
    private bool _exit;
    private bool _pause;

    public ThreadWorker(Action<T> action)
    {
        _action = action;
        _startEvent = new AutoResetEvent(false);
        _stopEvent = new AutoResetEvent(false);
        _entities = new ConcurrentQueue<T>();
        _thread = new Thread(Execute);
        _thread.Start(this);
    }

    /// <summary>
    /// Distributes <paramref name="coll"/> round-robin over N-1 workers, then drains
    /// and shuts them all down before returning.
    /// </summary>
    public static void MapParallel(IEnumerable<T> coll, Action<T> action)
    {
        var workerCount = Math.Max(Environment.ProcessorCount - 1, 1);
        var workers = new ThreadWorker<T>[workerCount];
        for (var i = 0; i < workerCount; i++)
        {
            workers[i] = new ThreadWorker<T>(action);
            workers[i].Wake();
        }

        var index = 0;
        foreach (var t in coll)
        {
            workers[index++].Push(t);
            if (index >= workerCount)
            {
                index = 0;
            }
        }

        // Drain and shut down the workers (Exit blocks until each queue is empty).
        foreach (var worker in workers)
        {
            worker.Exit();
        }

        Array.Clear(workers);
    }

    /// <summary>Releases the worker to start draining its queue.</summary>
    public void Wake()
    {
        _startEvent.Set();
    }

    /// <summary>Requests a pause and blocks until the worker has drained its queue.</summary>
    public void Sleep()
    {
        Volatile.Write(ref _pause, true);
        _stopEvent.WaitOne();
    }

    /// <summary>Drains the queue, then terminates the worker thread.</summary>
    public void Exit()
    {
        // FIX: write with a barrier so the worker thread observes the flag.
        Volatile.Write(ref _exit, true);
        Wake();
        Sleep();
    }

    public void Push(T entity)
    {
        _entities.Enqueue(entity);
    }

    private static void Execute(object? obj)
    {
        var worker = (ThreadWorker<T>)obj!;

        var reader = worker._entities;

        while (worker._startEvent.WaitOne())
        {
            while (true)
            {
                var pauseRequested = Volatile.Read(ref worker._pause);
                if (reader.TryDequeue(out var t))
                {
                    worker._action(t);
                }
                else if (pauseRequested) // Break when finished
                {
                    break;
                }
                else
                {
                    // FIX: the empty-queue/not-paused case previously busy-spun at
                    // 100% CPU; yield the time slice while waiting for more work.
                    Thread.Yield();
                }
            }

            worker._stopEvent.Set(); // Allow the main thread to continue now that we are finished
            worker._pause = false;

            if (Volatile.Read(ref worker._exit))
            {
                return;
            }
        }
    }
}

/// <summary>S3-compatible hosting providers supported by the upload command.</summary>
public enum UploadProvider
{
    Other,
    Amazon,
    BackBlaze,
    CloudFlare,
    DigitalOcean,
    Google,
    Linode
}

public static class Utilities
{
    /// <summary>
    /// Copies <paramref name="chars"/> into <paramref name="dest"/>, replacing every
    /// occurrence of any character in <paramref name="invalidChars"/> with
    /// <paramref name="replacementChar"/>. <paramref name="dest"/> must be at least
    /// as long as <paramref name="chars"/>.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static void ReplaceAny(
        this ReadOnlySpan<char> chars, ReadOnlySpan<char> invalidChars, char replacementChar, Span<char> dest
    )
    {
        while (true)
        {
            var indexOf = chars.IndexOfAny(invalidChars);
            if (indexOf == -1)
            {
                chars.CopyTo(dest);
                break;
            }

            chars[..indexOf].CopyTo(dest);
            // FIX: honor the replacementChar parameter — it was hard-coded to '-'.
            dest[indexOf] = replacementChar;

            chars = chars[(indexOf + 1)..];
            dest = dest[(indexOf + 1)..];
        }
    }
}
/// <summary>
/// Rolling Adler-32-style checksum (rsync weak hash): the low 16 bits hold the
/// running byte sum (seeded with 1), the high 16 bits the sum-of-sums. Both halves
/// wrap modulo 2^16.
/// </summary>
public static class Adler32RollingChecksum
{
    /// <summary>Computes the checksum of an entire block from scratch.</summary>
    public static uint Calculate(ReadOnlySpan<byte> block)
    {
        ushort sum = 1; // Adler seed
        ushort sumOfSums = 0;

        foreach (var value in block)
        {
            sum = (ushort)(sum + value);
            sumOfSums = (ushort)(sumOfSums + sum);
        }

        return (uint)((sumOfSums << 16) | sum);
    }

    /// <summary>
    /// Rolls an existing checksum one byte forward: <paramref name="remove"/> leaves
    /// the window, <paramref name="add"/> enters it. O(1) instead of rehashing the
    /// whole <paramref name="chunkSize"/>-byte window.
    /// </summary>
    public static uint Rotate(uint checksum, byte remove, byte add, int chunkSize)
    {
        var sum = (ushort)checksum;
        var sumOfSums = (ushort)(checksum >> 16);

        sum = (ushort)(sum - remove + add);
        // The trailing -1 compensates for the seed of 1 leaving the window's first position.
        sumOfSums = (ushort)(sumOfSums - chunkSize * remove + sum - 1);

        return (uint)((sumOfSums << 16) | sum);
    }
}
CreateLookup32Chars(); + + private static uint[] CreateLookup32Chars() + { + var result = new uint[256]; + for (var i = 0; i < 256; i++) + { + var s = i.ToString("X2"); + if (BitConverter.IsLittleEndian) + { + result[i] = s[0] + ((uint)s[1] << 16); + } + else + { + result[i] = s[1] + ((uint)s[0] << 16); + } + } + + return result; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string ToHexString(this byte[] bytes) => new ReadOnlySpan(bytes).ToHexString(); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string ToHexString(this Span bytes) => ((ReadOnlySpan)bytes).ToHexString(); + + public static unsafe string ToHexString(this ReadOnlySpan bytes) + { + var result = new string((char)0, bytes.Length * 2); + fixed (char* resultP = result) + { + var resultP2 = (uint*)resultP; + for (var i = 0; i < bytes.Length; i++) + { + resultP2[i] = _lookup32Chars[bytes[i]]; + } + } + + return result; + } + + public static unsafe void GetBytes(this string str, Span bytes) + { + fixed (char* strP = str) + { + var i = 0; + var j = 0; + while (i < str.Length) + { + int chr1 = strP[i++]; + int chr2 = strP[i++]; + if (BitConverter.IsLittleEndian) + { + bytes[j++] = (byte)(((chr1 - (chr1 >= 65 ? 55 : 48)) << 4) | (chr2 - (chr2 >= 65 ? 55 : 48))); + } + else + { + bytes[j++] = (byte)((chr1 - (chr1 >= 65 ? 55 : 48)) | ((chr2 - (chr2 >= 65 ? 
55 : 48)) << 4)); + } + } + } + } +} diff --git a/PatchSync.Common/IsExternalInit.cs b/PatchSync.Common/IsExternalInit.cs new file mode 100644 index 0000000..eaea2ae --- /dev/null +++ b/PatchSync.Common/IsExternalInit.cs @@ -0,0 +1,8 @@ +using System.ComponentModel; + +namespace System.Runtime.CompilerServices; + +[EditorBrowsable(EditorBrowsableState.Never)] +internal class IsExternalInit +{ +} diff --git a/PatchSync.Common/Manifest/ManifestFileCommand.cs b/PatchSync.Common/Manifest/ManifestFileCommand.cs new file mode 100644 index 0000000..468197f --- /dev/null +++ b/PatchSync.Common/Manifest/ManifestFileCommand.cs @@ -0,0 +1,11 @@ +namespace PatchSync.Common.Manifest; + +public enum ManifestFileCommand +{ + DeltaUpdate, // Default - download the signature file if available and diff to update, otherwise fall back to full download + AlwaysFullUpdate, // Will download/install the full file, no matter what + UpdateIfFullHashMismatch, // Full Update if hash doesn't match - No delta/rolling hash or signature file + UpdateIfMissing, // Only download/install if missing + NeverUpdate, // Only download/install the first time, never again, even if missing + Delete // Deletes the file if it exists, otherwise nothing +} diff --git a/PatchSync.Common/Manifest/ManifestFileCommandExt.cs b/PatchSync.Common/Manifest/ManifestFileCommandExt.cs new file mode 100644 index 0000000..e7e81bf --- /dev/null +++ b/PatchSync.Common/Manifest/ManifestFileCommandExt.cs @@ -0,0 +1,21 @@ +using System.Runtime.CompilerServices; + +namespace PatchSync.Common.Manifest; + +public static class ManifestFileCommandExt +{ + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string GetIcon(this ManifestFileCommand command) + { + return command switch + { + ManifestFileCommand.DeltaUpdate => ":writing_hand:", + ManifestFileCommand.AlwaysFullUpdate => ":page_facing_up:", + ManifestFileCommand.UpdateIfMissing => ":plus:", + ManifestFileCommand.NeverUpdate => ":prohibited:", + 
ManifestFileCommand.Delete => ":wastebasket:", + ManifestFileCommand.UpdateIfFullHashMismatch => ":computer_disk:", + _ => throw new ArgumentOutOfRangeException(nameof(command)) + }; + } +} diff --git a/PatchSync.Common/Manifest/ManifestFileEntry.cs b/PatchSync.Common/Manifest/ManifestFileEntry.cs new file mode 100644 index 0000000..53fa2f5 --- /dev/null +++ b/PatchSync.Common/Manifest/ManifestFileEntry.cs @@ -0,0 +1,31 @@ +namespace PatchSync.Common.Manifest; + +public class ManifestFileEntry +{ + public ManifestFileEntry(ManifestFileCommand command, string filePath, long fileSize) + { + Command = command; + FilePath = filePath; + FileSize = fileSize; + } + + // Do we need to add a date concept for UpdateIfNewer? + // We don't actually diff yet, so does this matter? + // Timestamps aren't reliable either. + public ManifestFileCommand Command { get; } + public string FilePath { get; } + + public long FileSize { get; } + + // If it is not the standard chunk size + public int ChunkSize { get; init; } + + // XxHash + public string? FastHash { get; init; } + + // SHA256 - For file integrity checks + public string? Hash { get; init; } + + public static int GetChunkSize(long fileSize) => + fileSize > 1024 * 1024 * 256 ? 
(int)Utilities.RoundUp((long)Math.Sqrt(fileSize)) : 1024; +} diff --git a/PatchSync.Common/Manifest/PatchChannel.cs b/PatchSync.Common/Manifest/PatchChannel.cs new file mode 100644 index 0000000..e6c05ed --- /dev/null +++ b/PatchSync.Common/Manifest/PatchChannel.cs @@ -0,0 +1,8 @@ +namespace PatchSync.Common.Manifest; + +public enum PatchChannel +{ + Dev, + Beta, + Prod +} diff --git a/PatchSync.Common/Manifest/PatchManifest.cs b/PatchSync.Common/Manifest/PatchManifest.cs new file mode 100644 index 0000000..8ed47ba --- /dev/null +++ b/PatchSync.Common/Manifest/PatchManifest.cs @@ -0,0 +1,12 @@ +namespace PatchSync.Common.Manifest; + +public record PatchManifest +{ + public string Version { get; init; } + + public DateTime Date { get; init; } + + public string Channel { get; init; } + + public ManifestFileEntry[] Files { get; init; } +} diff --git a/PatchSync.Common/PatchSync.Common.csproj b/PatchSync.Common/PatchSync.Common.csproj new file mode 100644 index 0000000..ed61904 --- /dev/null +++ b/PatchSync.Common/PatchSync.Common.csproj @@ -0,0 +1,15 @@ + + + + netstandard2.0;netstandard2.1 + preview + enable + enable + true + + + + + + + diff --git a/PatchSync.Common/Signatures/SignatureChunk.cs b/PatchSync.Common/Signatures/SignatureChunk.cs new file mode 100644 index 0000000..33e6fc4 --- /dev/null +++ b/PatchSync.Common/Signatures/SignatureChunk.cs @@ -0,0 +1,8 @@ +namespace PatchSync.Common.Signatures; + +public record struct SignatureChunk(uint rollingHash, ulong hash) +{ + public uint RollingHash = rollingHash; + + public ulong Hash = hash; +} diff --git a/PatchSync.Common/Signatures/SignatureFile.cs b/PatchSync.Common/Signatures/SignatureFile.cs new file mode 100644 index 0000000..3281900 --- /dev/null +++ b/PatchSync.Common/Signatures/SignatureFile.cs @@ -0,0 +1,13 @@ +namespace PatchSync.Common.Signatures; + +public class SignatureFile(int chunkSize, SignatureChunk[] chunks, byte[]? 
remainingData = null) +{ + private static readonly byte[] _emptyRemainingData = Array.Empty(); + + public int ChunkSize { get; } = chunkSize; + public SignatureChunk[] Chunks { get; } = chunks; + + // We can't get around this allocation if we use type-safe classes + // For high performance situations, just stream directly to a file/memory/etc + public byte[] RemainingData { get; } = remainingData ?? _emptyRemainingData; +} diff --git a/PatchSync.Common/Utilities.cs b/PatchSync.Common/Utilities.cs new file mode 100644 index 0000000..3e8e567 --- /dev/null +++ b/PatchSync.Common/Utilities.cs @@ -0,0 +1,9 @@ +using System.Runtime.CompilerServices; + +namespace PatchSync.Common; + +public class Utilities +{ + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static long RoundUp(long x) => (x + 7) & -8; +} diff --git a/PatchSync.SDK/Client/PatchSyncClient.cs b/PatchSync.SDK/Client/PatchSyncClient.cs new file mode 100644 index 0000000..d547549 --- /dev/null +++ b/PatchSync.SDK/Client/PatchSyncClient.cs @@ -0,0 +1,53 @@ +using System.Text.Json; +using PatchSync.Common.Manifest; +using PatchSync.Common.Signatures; +using PatchSync.SDK.Signatures; + +namespace PatchSync.SDK.Client; + +public class PatchSyncClient(string baseUri) +{ + public PatchManifest Manifest { get; private set; } + + private static async Task DownloadFile(string baseUri, string relativeUri) + { + using var httpClient = HttpHandler.CreateHttpClient(); + var url = new Uri($"{baseUri}/{relativeUri}"); + var response = await httpClient.GetAsync(url); + + // Throw if not successful + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadAsStreamAsync(); + } + + public async Task GetPatchManifest(string relativeUri) + { + var stream = await DownloadFile(baseUri, relativeUri); + Manifest = await JsonSerializer.DeserializeAsync(stream, JsonSerializerOptions.Default) ?? 
+ throw new InvalidOperationException("Could not properly download the manifest."); + +#if NETSTANDARD2_1_OR_GREATER + await stream.DisposeAsync(); +#else + stream.Dispose(); +#endif + return Manifest; + } + + public async Task GetSignatureFile(int originalFileSize, string relativeUri, int chunkSize) + { + var stream = await DownloadFile(baseUri, relativeUri); + var signatureFile = SignatureFileHandler.LoadSignature(originalFileSize, stream, chunkSize); + +#if NETSTANDARD2_1_OR_GREATER + await stream.DisposeAsync(); +#else + stream.Dispose(); +#endif + return signatureFile; + } + + public void ValidateFiles(string baseFolder, Func callback) => + FileValidator.ValidateFiles(Manifest, baseFolder, callback); +} diff --git a/PatchSync.SDK/FilePatcher/FilePatcher.cs b/PatchSync.SDK/FilePatcher/FilePatcher.cs new file mode 100644 index 0000000..6f52aef --- /dev/null +++ b/PatchSync.SDK/FilePatcher/FilePatcher.cs @@ -0,0 +1,151 @@ +using System.IO.Hashing; +using PatchSync.Common; +using PatchSync.Common.Hashing; +using PatchSync.Common.Signatures; + +namespace PatchSync.SDK; + +public static class FilePatcher +{ + public static PatchSlice[] GetPatchDeltas( + Stream inputStream, // File being processed + SignatureFile signatureFile, + Action? 
callback = null + ) + { + var length = inputStream.Length; + var chunks = signatureFile.Chunks; + var chunkSize = signatureFile.ChunkSize; // Should be already rounded + + var processingResult = new FileProcessingResult(); + processingResult.Started(length); + callback?.Invoke(processingResult); + + var slices = new PatchSlice?[chunks.Length]; + + // Special case where the local file is smaller than a chunk + if (length < chunkSize) + { + for (var i = 0; i < chunks.Length; i++) + { + slices[i] = new PatchSlice(PatchSliceLocation.RemoteSlice, i * chunkSize); + } + + return slices; + } + + // rolling hash -> index in signature file array + var chunksByRollingHash = new Dictionary>(); + for (var i = 0; i < chunks.Length; i++) + { + var signatureFileChunk = chunks[i]; + var rollingHash = signatureFileChunk.RollingHash; + if (!chunksByRollingHash.TryGetValue(rollingHash, out var list)) + { + chunksByRollingHash[rollingHash] = list = []; + } + + bool found = false; + foreach (var index in list) + { + // We can have two chunks with the same exactly rolling/hash + if (chunks[index] == signatureFileChunk) + { + found = true; + break; + } + } + + if (!found) + { + list.Add(i); + } + } + + var xxHash3 = new XxHash3(); + var chunkBuffer = new byte[chunkSize]; + + var checksum = 1u; + + var bufferIndex = 0; + + // Find the existing chunks + for (var i = 0; i < length; i++) + { + if (i < chunkSize) + { + if (inputStream.Read(chunkBuffer, 0, chunkBuffer.Length) != chunkBuffer.Length) + { + throw new Exception("Could not read the entire chunk."); + } + + checksum = Adler32RollingChecksum.Calculate(chunkBuffer); + var chunkSizeMinusOne = chunkSize - 1; + i += chunkSizeMinusOne; // -1 cause we have a i++ at the end of the for loop + + processingResult.InProgress(chunkSizeMinusOne); + callback?.Invoke(processingResult); + } + else + { + checksum = Adler32RollingChecksum.Rotate( + checksum, + chunkBuffer[bufferIndex], + chunkBuffer[bufferIndex++] = (byte)inputStream.ReadByte(), + 
chunkSize + ); + + // Circular buffer + if (bufferIndex >= chunkSize) + { + bufferIndex = 0; + } + } + + // Check if we have an existing chunk, if not, continue to the next byte + if (!chunksByRollingHash.TryGetValue(checksum, out var list)) + { + processingResult.InProgress(1); + callback?.Invoke(processingResult); + continue; + } + + // Our chunk is not really in order + xxHash3.Append(chunkBuffer.AsSpan(bufferIndex, chunkSize - bufferIndex)); + if (bufferIndex != 0) + { + xxHash3.Append(chunkBuffer.AsSpan(0, bufferIndex)); + } + + var hash = xxHash3.GetCurrentHashAsUInt64(); + xxHash3.Reset(); + + foreach (var entryIndex in list) + { + var entry = chunks[entryIndex]; + if (entry.RollingHash == checksum && entry.Hash == hash && slices[entryIndex] == null) + { + // We are at the end of the offset, so subtract chunkSize + slices[entryIndex] = new PatchSlice(PatchSliceLocation.ExistingSlice, inputStream.Position - chunkSize); + break; + } + } + + // processingResult.InProgress(1); + // callback?.Invoke(processingResult); + } + + // Go through each slice, identifying if it comes from a local offset, or remote download + for (var i = 0; i < slices.Length; i++) + { + if (slices[i] == null) + { + slices[i] = new PatchSlice(PatchSliceLocation.RemoteSlice, i * chunkSize); + } + } + + processingResult.Completed(); + callback?.Invoke(processingResult); + return slices; + } +} diff --git a/PatchSync.SDK/FilePatcher/FileValidator.cs b/PatchSync.SDK/FilePatcher/FileValidator.cs new file mode 100644 index 0000000..b956439 --- /dev/null +++ b/PatchSync.SDK/FilePatcher/FileValidator.cs @@ -0,0 +1,51 @@ +using System.Collections.Concurrent; +using System.IO.Hashing; +using System.IO.MemoryMappedFiles; +using PatchSync.Common.Manifest; + +namespace PatchSync.SDK; + +public static class FileValidator +{ + public static ManifestFileEntry[] ValidateFiles( + PatchManifest manifest, string baseFolder, Func? 
progressCallback = null + ) + { + if (manifest.Files.Length == 0) + { + return Array.Empty(); + } + + var queue = new ConcurrentQueue(); + var concurrency = Math.Max(Environment.ProcessorCount, 1); + + Parallel.ForEach( + manifest.Files, + new ParallelOptions { MaxDegreeOfParallelism = concurrency }, + file => ValidateManifestFileEntry(baseFolder, file) + ); + + return queue.ToArray(); + } + + public static bool ValidateFastHash(Stream inputStream, string fastHash) + { + var xxHash3 = new XxHash3(); + xxHash3.Append(inputStream); + + // Reset stream + inputStream.Seek(0, SeekOrigin.Begin); + + var hash = xxHash3.GetCurrentHashAsUInt64(); + return hash.ToString() == fastHash; + } + + private static void ValidateManifestFileEntry(string baseFolder, ManifestFileEntry file) + { + var xxHash3 = new XxHash3(); + using var mmf = MemoryMappedFile.CreateFromFile(file.FilePath, FileMode.Open); + using var mmStream = mmf.CreateViewStream(); + + // if (xxHash3.) + } +} diff --git a/PatchSync.SDK/FilePatcher/PatchSlice.cs b/PatchSync.SDK/FilePatcher/PatchSlice.cs new file mode 100644 index 0000000..cea4eaf --- /dev/null +++ b/PatchSync.SDK/FilePatcher/PatchSlice.cs @@ -0,0 +1,13 @@ +namespace PatchSync.SDK; + +public record PatchSlice +{ + public PatchSliceLocation Location { get; } + public long Offset { get; } + + public PatchSlice(PatchSliceLocation location, long offset) + { + Location = location; + Offset = offset; + } +} diff --git a/PatchSync.SDK/FilePatcher/PatchSliceLocation.cs b/PatchSync.SDK/FilePatcher/PatchSliceLocation.cs new file mode 100644 index 0000000..bd06a69 --- /dev/null +++ b/PatchSync.SDK/FilePatcher/PatchSliceLocation.cs @@ -0,0 +1,7 @@ +namespace PatchSync.SDK; + +public enum PatchSliceLocation +{ + ExistingSlice, + RemoteSlice +} diff --git a/PatchSync.SDK/FilePatcher/ValidationResult.cs b/PatchSync.SDK/FilePatcher/ValidationResult.cs new file mode 100644 index 0000000..7282d1d --- /dev/null +++ b/PatchSync.SDK/FilePatcher/ValidationResult.cs @@ -0,0 
+1,30 @@ +namespace PatchSync.SDK; + +public record ValidationResult +{ + private ValidationResult() + { + } + + public ValidationStatus Status { get; private init; } + public string? Hash { get; private init; } + public string? Message { get; private init; } + + public static ValidationResult InProgress(string message) => new() + { + Status = ValidationStatus.InProgress, + Message = message + }; + + public static ValidationResult Valid(string hash) => new() + { + Status = ValidationStatus.Valid, + Hash = hash + }; + + public static ValidationResult Invalid(string? message = null) => new() + { + Status = ValidationStatus.Invalid, + Message = message + }; +} diff --git a/PatchSync.SDK/FilePatcher/ValidationStatus.cs b/PatchSync.SDK/FilePatcher/ValidationStatus.cs new file mode 100644 index 0000000..71b3c95 --- /dev/null +++ b/PatchSync.SDK/FilePatcher/ValidationStatus.cs @@ -0,0 +1,8 @@ +namespace PatchSync.SDK; + +public enum ValidationStatus +{ + InProgress, + Valid, + Invalid +} diff --git a/PatchSync.SDK/HttpHandler/HttpHandler.cs b/PatchSync.SDK/HttpHandler/HttpHandler.cs new file mode 100644 index 0000000..76fcc21 --- /dev/null +++ b/PatchSync.SDK/HttpHandler/HttpHandler.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.Http; +using Polly; +using Polly.Extensions.Http; + +namespace PatchSync.SDK; + +public static class HttpHandler +{ + private static PolicyHttpMessageHandler? 
_policyHandler; + + public static HttpClient CreateHttpClient() + { + if (_policyHandler == null) + { + var retryPolicy = HttpPolicyExtensions + .HandleTransientHttpError() + .WaitAndRetryAsync(3, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt))); + +#if NET6_0_OR_GREATER + var socketHandler = new SocketsHttpHandler { PooledConnectionLifetime = TimeSpan.FromMinutes(3) }; +#else + var socketHandler = new HttpClientHandler(); +#endif + + _policyHandler = new PolicyHttpMessageHandler(retryPolicy) + { + InnerHandler = socketHandler + }; + } + + return new HttpClient(_policyHandler); + } +} diff --git a/PatchSync.SDK/IsExternalInit.cs b/PatchSync.SDK/IsExternalInit.cs new file mode 100644 index 0000000..eaea2ae --- /dev/null +++ b/PatchSync.SDK/IsExternalInit.cs @@ -0,0 +1,8 @@ +using System.ComponentModel; + +namespace System.Runtime.CompilerServices; + +[EditorBrowsable(EditorBrowsableState.Never)] +internal class IsExternalInit +{ +} diff --git a/PatchSync.SDK/PatchSync.SDK.csproj b/PatchSync.SDK/PatchSync.SDK.csproj new file mode 100644 index 0000000..ff9a170 --- /dev/null +++ b/PatchSync.SDK/PatchSync.SDK.csproj @@ -0,0 +1,22 @@ + + + + netstandard2.0;netstandard2.1;net8.0 + preview + enable + enable + true + + + + + + + + + + + + + + diff --git a/PatchSync.SDK/Signatures/SignatureFileHandler.Deserializer.cs b/PatchSync.SDK/Signatures/SignatureFileHandler.Deserializer.cs new file mode 100644 index 0000000..b15a53a --- /dev/null +++ b/PatchSync.SDK/Signatures/SignatureFileHandler.Deserializer.cs @@ -0,0 +1,67 @@ +using System.Buffers.Binary; +using System.Runtime.InteropServices; +using PatchSync.Common.Signatures; +#if NET6_0_OR_GREATER +using System.Runtime.CompilerServices; +#endif + +namespace PatchSync.SDK.Signatures; + +#if NET6_0_OR_GREATER +[SkipLocalsInit] +#endif +public static partial class SignatureFileHandler +{ + public static SignatureFile LoadSignature(long originalFileSize, Stream stream, int chunkSize) + { + var chunkCount = 
Math.DivRem(originalFileSize, chunkSize, out var lastChunkSize); + + if (chunkCount < 1) + { + throw new InvalidOperationException("File size is too small for a signature file."); + } + +#if NETSTANDARD2_1_OR_GREATER + Span chunk = stackalloc byte[12]; +#else + var chunk = new byte[12]; +#endif + + var chunks = new SignatureChunk[chunkCount]; + for (var i = 0; i < chunkCount; i++) + { +#if NETSTANDARD2_1_OR_GREATER + if (stream.Read(chunk) != chunk.Length) +#else + if (stream.Read(chunk, 0, 12) != chunk.Length) +#endif + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + chunks[i] = new SignatureChunk + { + RollingHash = BinaryPrimitives.ReadUInt32LittleEndian(chunk), +#if NETSTANDARD2_1_OR_GREATER + Hash = BinaryPrimitives.ReadUInt64LittleEndian(chunk[4..]) +#else + Hash = BinaryPrimitives.ReadUInt64LittleEndian(chunk.AsSpan(4)) +#endif + }; + } + + // Last remaining data + if (lastChunkSize == 0) + { + return new SignatureFile(chunkSize, chunks); + } + + var remainingChunk = new byte[lastChunkSize]; + if (stream.Read(remainingChunk, 0, (int)lastChunkSize) != lastChunkSize) + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + return new SignatureFile(chunkSize, chunks, remainingChunk); + } +} diff --git a/PatchSync.SDK/Signatures/SignatureFileHandler.Generator.cs b/PatchSync.SDK/Signatures/SignatureFileHandler.Generator.cs new file mode 100644 index 0000000..d76986e --- /dev/null +++ b/PatchSync.SDK/Signatures/SignatureFileHandler.Generator.cs @@ -0,0 +1,241 @@ +using System.IO.Hashing; +using System.Runtime.CompilerServices; +using System.Security.Cryptography; +using PatchSync.Common; +using PatchSync.Common.Hashing; +using PatchSync.Common.Signatures; + +namespace PatchSync.SDK.Signatures; + +public static partial class SignatureFileHandler +{ + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int RoundUp(int x) => (x + 7) & -8; + + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int GetDefaultChunkSize(long length) => +#if NET6_0_OR_GREATER + Math.Clamp(RoundUp((int)Math.Sqrt(length)), 704, 65528); +#else + Math.Min(Math.Max(RoundUp((int)Math.Sqrt(length)), 704), 65528); +#endif + + public static SignatureFile GenerateSignature( + ReadOnlySpan source, + int chunkSize = -1, + Action? callback = null + ) + { + var length = source.Length; + + var processingResult = new FileProcessingResult(); + processingResult.Started(length); + callback?.Invoke(processingResult); + + var hasher = new XxHash3(); + chunkSize = chunkSize <= 0 ? GetDefaultChunkSize(length) : RoundUp(chunkSize); + + var chunkCount = Math.DivRem(length, chunkSize, out var lastChunkSize); + var chunks = new SignatureChunk[chunkCount + (lastChunkSize >= 12 ? 1 : 0)]; + byte[]? remainingData = null; + + var pos = 0; + for (var i = 0; i < chunkCount; i++) + { + var bytes = source.Slice(pos, chunkSize); + pos += chunkSize; + + chunks[i] = GetChunk(bytes, hasher); + processingResult.InProgress(chunkSize); + callback?.Invoke(processingResult); + } + + if (lastChunkSize > 0) + { + var bytes = source.Slice(pos, lastChunkSize); + + if (lastChunkSize >= 12) + { + chunks[chunks.Length - 1] = GetChunk(bytes, hasher); + } + else + { + remainingData = bytes.ToArray(); + } + + processingResult.InProgress(chunkSize); + callback?.Invoke(processingResult); + } + + processingResult.Completed(); + callback?.Invoke(processingResult); + + return new SignatureFile(chunkSize, chunks, remainingData); + } + + public static SignatureFile GenerateSignature( + Stream stream, + int chunkSize = -1, + Action? callback = null + ) + { + var length = stream.Length; + var processingResult = new FileProcessingResult(); + processingResult.Started(length); + callback?.Invoke(processingResult); + + var hasher = new XxHash3(); + chunkSize = chunkSize <= 0 ? 
GetDefaultChunkSize(length) : RoundUp(chunkSize); + var chunk = new byte[chunkSize]; + + var chunkCount = Math.DivRem(length, chunkSize, out var lastChunkSize); + var chunks = new SignatureChunk[chunkCount + (lastChunkSize >= 12 ? 1 : 0)]; + byte[]? remainingData = null; + + for (var i = 0; i < chunkCount; i++) + { +#if NETSTANDARD2_1_OR_GREATER + if (stream.Read(chunk) != chunkSize) +#else + if (stream.Read(chunk, 0, chunkSize) != chunk.Length) +#endif + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + chunks[i] = GetChunk(chunk, hasher); + processingResult.InProgress(chunk.Length); + callback?.Invoke(processingResult); + } + + if (lastChunkSize > 0) + { +#if NETSTANDARD2_1_OR_GREATER + if (stream.Read(chunk) != chunkSize) +#else + if (stream.Read(chunk, 0, (int)lastChunkSize) != lastChunkSize) +#endif + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + if (lastChunkSize >= 12) + { + chunks[chunks.Length - 1] = GetChunk(chunk, hasher); + } + else + { +#if NETSTANDARD2_1_OR_GREATER + remainingData = chunk[..(int)lastChunkSize].ToArray(); +#else + remainingData = chunk.AsSpan(0, (int)lastChunkSize).ToArray(); +#endif + } + + processingResult.InProgress(lastChunkSize); + callback?.Invoke(processingResult); + } + + processingResult.Completed(); + callback?.Invoke(processingResult); + + return new SignatureFile(chunkSize, chunks, remainingData); + } + + public static (ulong, byte[]) CreateSignatureFile( + Stream inputStream, + Stream outputStream, + int chunkSize = -1, + Action? callback = null + ) + { + var length = inputStream.Length; + var processingResult = new FileProcessingResult(); + processingResult.Started(length); + + callback?.Invoke(processingResult); + + using var binaryWriter = new BinaryWriter(outputStream); + + using var fullHasher = SHA256.Create(); + var fastHasher = new XxHash3(); + var hasher = new XxHash3(); + chunkSize = chunkSize <= 0 ? 
GetDefaultChunkSize(length) : RoundUp(chunkSize); + + var chunk = new byte[chunkSize]; + + var chunkCount = Math.DivRem(length, chunkSize, out var lastChunkSize); + + for (var i = 0; i < chunkCount; i++) + { + if (inputStream.Read(chunk, 0, chunkSize) != chunk.Length) + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + WriteChunk(chunk, chunkSize, hasher, binaryWriter); + + fastHasher.Append(chunk); + fullHasher.TransformBlock(chunk, 0, chunkSize, null, 0); + + processingResult.InProgress(chunkSize); + callback?.Invoke(processingResult); + } + + if (lastChunkSize > 0) + { + if (inputStream.Read(chunk, 0, (int)lastChunkSize) != lastChunkSize) + { + throw new InvalidOperationException("Reached end of stream prematurely"); + } + + binaryWriter.Write(chunk, 0, (int)lastChunkSize); + + fastHasher.Append(chunk.AsSpan(0, (int)lastChunkSize)); + fullHasher.TransformFinalBlock(chunk, 0, (int)lastChunkSize); + + processingResult.InProgress(lastChunkSize); + callback?.Invoke(processingResult); + } + else + { + fullHasher.TransformFinalBlock(chunk, 0, 0); + } + + processingResult.Completed(); + callback?.Invoke(processingResult); + + return (fastHasher.GetCurrentHashAsUInt64(), fullHasher.Hash); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static SignatureChunk GetChunk(ReadOnlySpan bytes, XxHash3 hasher) + { + hasher.Append(bytes); + var hash = hasher.GetCurrentHashAsUInt64(); + hasher.Reset(); + + return new SignatureChunk(Adler32RollingChecksum.Calculate(bytes), hash); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void WriteChunk(byte[] chunk, int length, XxHash3 hash, BinaryWriter binaryWriter) + { + if (chunk.Length <= 12) + { + binaryWriter.Write(chunk, 0, length); + return; + } + + // Write rolling checksum + var adlerChecksum = Adler32RollingChecksum.Calculate(chunk.AsSpan(0, length)); + binaryWriter.Write(adlerChecksum); + + hash.Append(chunk); + + // Write hash + 
binaryWriter.Write(hash.GetCurrentHashAsUInt64()); + + hash.Reset(); + } +} diff --git a/PatchSync.sln b/PatchSync.sln new file mode 100644 index 0000000..690cd8a --- /dev/null +++ b/PatchSync.sln @@ -0,0 +1,28 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PatchSync.CLI", "PatchSync.CLI\PatchSync.CLI.csproj", "{5D41C62D-3E61-4BBE-9C9A-A3C69ECCB31F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PatchSync.Common", "PatchSync.Common\PatchSync.Common.csproj", "{836FCA55-4876-4946-836E-C6C242658F85}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PatchSync.SDK", "PatchSync.SDK\PatchSync.SDK.csproj", "{133A976B-525A-4BDD-9720-690C1CCB9392}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {5D41C62D-3E61-4BBE-9C9A-A3C69ECCB31F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5D41C62D-3E61-4BBE-9C9A-A3C69ECCB31F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5D41C62D-3E61-4BBE-9C9A-A3C69ECCB31F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5D41C62D-3E61-4BBE-9C9A-A3C69ECCB31F}.Release|Any CPU.Build.0 = Release|Any CPU + {836FCA55-4876-4946-836E-C6C242658F85}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {836FCA55-4876-4946-836E-C6C242658F85}.Debug|Any CPU.Build.0 = Debug|Any CPU + {836FCA55-4876-4946-836E-C6C242658F85}.Release|Any CPU.ActiveCfg = Release|Any CPU + {836FCA55-4876-4946-836E-C6C242658F85}.Release|Any CPU.Build.0 = Release|Any CPU + {133A976B-525A-4BDD-9720-690C1CCB9392}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {133A976B-525A-4BDD-9720-690C1CCB9392}.Debug|Any CPU.Build.0 = Debug|Any CPU + {133A976B-525A-4BDD-9720-690C1CCB9392}.Release|Any CPU.ActiveCfg = Release|Any CPU + {133A976B-525A-4BDD-9720-690C1CCB9392}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection 
+EndGlobal