- tweak: loading popup things

* - tweak: change loading handle logic

* - tweak: beautify loading things

* - fix: speed things while downloading
This commit is contained in:
Cinkafox
2025-12-06 23:25:25 +03:00
committed by GitHub
parent d7f775e80c
commit 0c6bbaadac
39 changed files with 710 additions and 491 deletions

View File

@@ -1,5 +1,7 @@
using System.Diagnostics.CodeAnalysis;
using Nebula.Shared.FileApis.Interfaces;
using Nebula.Shared.Models;
using Nebula.Shared.Utils;
namespace Nebula.Shared.FileApis;
@@ -31,7 +33,7 @@ public sealed class FileApi : IReadWriteFileApi
return false;
}
public bool Save(string path, Stream input)
public bool Save(string path, Stream input, ILoadingHandler? loadingHandler = null)
{
var currPath = Path.Join(RootPath, path);
@@ -41,6 +43,13 @@ public sealed class FileApi : IReadWriteFileApi
if (!dirInfo.Exists) dirInfo.Create();
using var stream = new FileStream(currPath, FileMode.Create, FileAccess.Write, FileShare.None);
if (loadingHandler != null)
{
input.CopyTo(stream, loadingHandler);
return true;
}
input.CopyTo(stream);
return true;
}

View File

@@ -47,8 +47,8 @@ public class HashApi : IFileApi
return false;
}
public bool Save(RobustManifestItem item, Stream stream){
return _fileApi.Save(GetManifestPath(item), stream);
public bool Save(RobustManifestItem item, Stream stream, ILoadingHandler? loadingHandler){
return _fileApi.Save(GetManifestPath(item), stream, loadingHandler);
}
public bool Has(RobustManifestItem item){

View File

@@ -1,8 +1,10 @@
namespace Nebula.Shared.FileApis.Interfaces;
using Nebula.Shared.Models;
namespace Nebula.Shared.FileApis.Interfaces;
public interface IWriteFileApi
{
public bool Save(string path, Stream input);
public bool Save(string path, Stream input, ILoadingHandler? loadingHandler = null);
public bool Remove(string path);
public bool Has(string path);
}

View File

@@ -1,20 +1,20 @@
namespace Nebula.Shared.Models;
public interface ILoadingHandler
public interface ILoadingHandler : IDisposable
{
public void SetJobsCount(int count);
public int GetJobsCount();
public void SetJobsCount(long count);
public long GetJobsCount();
public void SetResolvedJobsCount(int count);
public int GetResolvedJobsCount();
public void SetResolvedJobsCount(long count);
public long GetResolvedJobsCount();
public void SetLoadingMessage(string message);
public void AppendJob(int count = 1)
public void AppendJob(long count = 1)
{
SetJobsCount(GetJobsCount() + count);
}
public void AppendResolvedJob(int count = 1)
public void AppendResolvedJob(long count = 1)
{
SetResolvedJobsCount(GetResolvedJobsCount() + count);
}
@@ -31,6 +31,57 @@ public interface ILoadingHandler
}
}
/// <summary>
/// Renders an <see cref="ILoadingHandler"/>'s progress counters into a
/// human-readable display string.
/// </summary>
public interface ILoadingFormater
{
    // Returns the formatted progress text for the handler's current counts.
    public string Format(ILoadingHandler loadingHandler);
}
/// <summary>
/// Creates loading-progress reporting contexts and owns their shared resources;
/// dispose the factory once no further contexts will be created.
/// </summary>
public interface ILoadingHandlerFactory: IDisposable
{
    // Creates a new handler; a null formatter means the implementation's default formatting.
    public ILoadingHandler CreateLoadingContext(ILoadingFormater? loadingFormater = null);
}
/// <summary>
/// Sink for download-speed samples pushed by the downloader.
/// </summary>
public interface IConnectionSpeedHandler
{
    // Reports a speed sample; callers compute it as bytes / seconds, so the
    // unit appears to be bytes per second — NOTE(review): confirm against UI.
    public void PasteSpeed(int speed);
}
/// <summary>
/// Default progress formatter: renders progress as "resolved/total" job counts.
/// </summary>
public sealed class DefaultLoadingFormater : ILoadingFormater
{
    // Shared stateless instance; readonly so callers cannot swap it out.
    public static readonly DefaultLoadingFormater Instance = new();

    public string Format(ILoadingHandler loadingHandler)
    {
        return $"{loadingHandler.GetResolvedJobsCount()}/{loadingHandler.GetJobsCount()}";
    }
}
/// <summary>
/// Progress formatter for byte-based jobs: renders sizes such as
/// "1.5 MB / 2 GB" from the handler's resolved/total counters.
/// </summary>
public sealed class FileLoadingFormater : ILoadingFormater
{
    private const long KB = 1024;
    private const long MB = KB * 1024;
    private const long GB = MB * 1024;
    private const long TB = GB * 1024;

    public string Format(ILoadingHandler loadingHandler)
    {
        var resolved = FormatBytes(loadingHandler.GetResolvedJobsCount());
        var total = FormatBytes(loadingHandler.GetJobsCount());
        return resolved + " / " + total;
    }

    /// <summary>
    /// Formats a byte count as a human-readable size with up to two decimals,
    /// choosing the largest unit the count reaches (B, KB, MB, GB, TB).
    /// </summary>
    public static string FormatBytes(long bytes) => bytes switch
    {
        >= TB => $"{bytes / (double)TB:0.##} TB",
        >= GB => $"{bytes / (double)GB:0.##} GB",
        >= MB => $"{bytes / (double)MB:0.##} MB",
        >= KB => $"{bytes / (double)KB:0.##} KB",
        _ => $"{bytes} B",
    };
}
public sealed class QueryJob : IDisposable
{
private readonly ILoadingHandler _handler;

View File

@@ -32,19 +32,18 @@ public class ConfigurationService
ConfigurationApi = fileService.CreateFileApi("config");
}
public void MigrateConfigs(ILoadingHandler loadingHandler)
public void MigrateConfigs(ILoadingHandlerFactory loadingHandlerFactory)
{
Task.Run(async () =>
{
var loadingHandler = loadingHandlerFactory.CreateLoadingContext();
foreach (var migration in _migrations)
{
await migration.DoMigrate(this, _serviceProvider, loadingHandler);
}
if (loadingHandler is IDisposable disposable)
{
disposable.Dispose();
}
loadingHandler.Dispose();
loadingHandlerFactory.Dispose();
});
}

View File

@@ -1,4 +1,5 @@
using System.Buffers.Binary;
using System.Diagnostics;
using System.Globalization;
using System.Net.Http.Headers;
using System.Numerics;
@@ -34,11 +35,10 @@ public partial class ContentService
}
public async Task<HashApi> EnsureItems(ManifestReader manifestReader, Uri downloadUri,
ILoadingHandler loadingHandler,
ILoadingHandlerFactory loadingFactory,
CancellationToken cancellationToken)
{
List<RobustManifestItem> allItems = [];
List<RobustManifestItem> items = [];
while (manifestReader.TryReadItem(out var item))
{
@@ -50,40 +50,44 @@ public partial class ContentService
var hashApi = CreateHashApi(allItems);
items = allItems.Where(a=> !hashApi.Has(a)).ToList();
loadingHandler.SetLoadingMessage("Download Count:" + items.Count);
var items = allItems.Where(a=> !hashApi.Has(a)).ToList();
_logger.Log("Download Count:" + items.Count);
await Download(downloadUri, items, hashApi, loadingHandler, cancellationToken);
await Download(downloadUri, items, hashApi, loadingFactory, cancellationToken);
return hashApi;
}
public async Task<HashApi> EnsureItems(RobustManifestInfo info, ILoadingHandler loadingHandler,
public async Task<HashApi> EnsureItems(RobustManifestInfo info, ILoadingHandlerFactory loadingFactory,
CancellationToken cancellationToken)
{
_logger.Log("Getting manifest: " + info.Hash);
loadingHandler.SetLoadingMessage("Getting manifest: " + info.Hash);
var loadingHandler = loadingFactory.CreateLoadingContext(new FileLoadingFormater());
loadingHandler.SetLoadingMessage("Loading manifest");
if (ManifestFileApi.TryOpen(info.Hash, out var stream))
{
_logger.Log("Loading manifest from: " + info.Hash);
return await EnsureItems(new ManifestReader(stream), info.DownloadUri, loadingHandler, cancellationToken);
_logger.Log("Loading manifest from disk");
loadingHandler.Dispose();
return await EnsureItems(new ManifestReader(stream), info.DownloadUri, loadingFactory, cancellationToken);
}
SetServerHash(info.ManifestUri.ToString(), info.Hash);
_logger.Log("Fetching manifest from: " + info.ManifestUri);
loadingHandler.SetLoadingMessage("Fetching manifest from: " + info.ManifestUri);
loadingHandler.SetLoadingMessage("Fetching manifest from: " + info.ManifestUri.Host);
var response = await _http.GetAsync(info.ManifestUri, cancellationToken);
if (!response.IsSuccessStatusCode) throw new Exception();
response.EnsureSuccessStatusCode();
loadingHandler.SetJobsCount(response.Content.Headers.ContentLength ?? 0);
await using var streamContent = await response.Content.ReadAsStreamAsync(cancellationToken);
ManifestFileApi.Save(info.Hash, streamContent);
ManifestFileApi.Save(info.Hash, streamContent, loadingHandler);
loadingHandler.Dispose();
streamContent.Seek(0, SeekOrigin.Begin);
using var manifestReader = new ManifestReader(streamContent);
return await EnsureItems(manifestReader, info.DownloadUri, loadingHandler, cancellationToken);
return await EnsureItems(manifestReader, info.DownloadUri, loadingFactory, cancellationToken);
}
public void Unpack(HashApi hashApi, IWriteFileApi otherApi, ILoadingHandler loadingHandler)
@@ -107,30 +111,22 @@ public partial class ContentService
}
else
{
_logger.Error("OH FUCK!! " + item.Path);
_logger.Error("Error while unpacking thinks " + item.Path);
}
loadingHandler.AppendResolvedJob();
});
if (loadingHandler is IDisposable disposable)
{
disposable.Dispose();
}
}
public async Task Download(Uri contentCdn, List<RobustManifestItem> toDownload, HashApi hashApi, ILoadingHandler loadingHandler,
private async Task Download(Uri contentCdn, List<RobustManifestItem> toDownload, HashApi hashApi, ILoadingHandlerFactory loadingHandlerFactory,
CancellationToken cancellationToken)
{
if (toDownload.Count == 0 || cancellationToken.IsCancellationRequested)
{
_logger.Log("Nothing to download! Fuck this!");
_logger.Log("Nothing to download! Skip!");
return;
}
var downloadJobWatch = loadingHandler.GetQueryJob();
loadingHandler.SetLoadingMessage("Downloading from: " + contentCdn);
_logger.Log("Downloading from: " + contentCdn);
var requestBody = new byte[toDownload.Count * 4];
@@ -152,70 +148,56 @@ public partial class ContentService
request.Headers.AcceptEncoding.Add(new StringWithQualityHeaderValue("zstd"));
var response = await _http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken);
if (cancellationToken.IsCancellationRequested)
{
_logger.Log("Downloading cancelled!");
return;
}
downloadJobWatch.Dispose();
response.EnsureSuccessStatusCode();
var stream = await response.Content.ReadAsStreamAsync();
var bandwidthStream = new BandwidthStream(stream);
stream = bandwidthStream;
var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
if (response.Content.Headers.ContentEncoding.Contains("zstd"))
stream = new ZStdDecompressStream(stream);
await using var streamDispose = stream;
// Read flags header
var streamHeader = await stream.ReadExactAsync(4, null);
var streamHeader = await stream.ReadExactAsync(4, cancellationToken);
var streamFlags = (DownloadStreamHeaderFlags)BinaryPrimitives.ReadInt32LittleEndian(streamHeader);
var preCompressed = (streamFlags & DownloadStreamHeaderFlags.PreCompressed) != 0;
// compressContext.SetParameter(ZSTD_cParameter.ZSTD_c_nbWorkers, 4);
// If the stream is pre-compressed we need to decompress the blobs to verify BLAKE2B hash.
// If it isn't, we need to manually try re-compressing individual files to store them.
var compressContext = preCompressed ? null : new ZStdCCtx();
var decompressContext = preCompressed ? new ZStdDCtx() : null;
// Normal file header:
// <int32> uncompressed length
// When preCompressed is set, we add:
// <int32> compressed length
var fileHeader = new byte[preCompressed ? 8 : 4];
var downloadLoadHandler = loadingHandlerFactory.CreateLoadingContext();
downloadLoadHandler.SetJobsCount(toDownload.Count);
downloadLoadHandler.SetLoadingMessage("Fetching files...");
if (loadingHandlerFactory is IConnectionSpeedHandler speedHandlerStart)
speedHandlerStart.PasteSpeed(0);
try
{
// Buffer for storing compressed ZStd data.
var compressBuffer = new byte[1024];
// Buffer for storing uncompressed data.
var readBuffer = new byte[1024];
var i = 0;
loadingHandler.AppendJob(toDownload.Count);
var downloadWatchdog = new Stopwatch();
var lengthAcc = 0;
var timeAcc = 0L;
foreach (var item in toDownload)
{
if (cancellationToken.IsCancellationRequested)
{
_logger.Log("Downloading cancelled!");
decompressContext?.Dispose();
compressContext?.Dispose();
return;
}
cancellationToken.ThrowIfCancellationRequested();
downloadWatchdog.Restart();
// Read file header.
await stream.ReadExactAsync(fileHeader, null);
await stream.ReadExactAsync(fileHeader, cancellationToken);
var length = BinaryPrimitives.ReadInt32LittleEndian(fileHeader.AsSpan(0, 4));
var fileLoadingHandler = loadingHandlerFactory.CreateLoadingContext(new FileLoadingFormater());
fileLoadingHandler.SetLoadingMessage(item.Path.Split("/").Last());
var blockFileLoadHandle = length <= 100000;
EnsureBuffer(ref readBuffer, length);
var data = readBuffer.AsMemory(0, length);
@@ -226,9 +208,10 @@ public partial class ContentService
if (compressedLength > 0)
{
fileLoadingHandler.AppendJob(compressedLength);
EnsureBuffer(ref compressBuffer, compressedLength);
var compressedData = compressBuffer.AsMemory(0, compressedLength);
await stream.ReadExactAsync(compressedData, null);
await stream.ReadExactAsync(compressedData, cancellationToken, blockFileLoadHandle ? null : fileLoadingHandler);
// Decompress so that we can verify hash down below.
@@ -239,24 +222,53 @@ public partial class ContentService
}
else
{
await stream.ReadExactAsync(data, null);
fileLoadingHandler.AppendJob(length);
await stream.ReadExactAsync(data, cancellationToken, blockFileLoadHandle ? null : fileLoadingHandler);
}
}
else
{
await stream.ReadExactAsync(data, null);
fileLoadingHandler.AppendJob(length);
await stream.ReadExactAsync(data, cancellationToken, blockFileLoadHandle ? null : fileLoadingHandler);
}
using var fileStream = new MemoryStream(data.ToArray());
hashApi.Save(item, fileStream);
hashApi.Save(item, fileStream, null);
_logger.Log("file saved:" + item.Path);
loadingHandler.AppendResolvedJob();
fileLoadingHandler.Dispose();
downloadLoadHandler.AppendResolvedJob();
i += 1;
if (loadingHandlerFactory is not IConnectionSpeedHandler speedHandler)
continue;
if (downloadWatchdog.ElapsedMilliseconds + timeAcc < 1000)
{
timeAcc += downloadWatchdog.ElapsedMilliseconds;
lengthAcc += length;
continue;
}
if (timeAcc != 0)
{
timeAcc += downloadWatchdog.ElapsedMilliseconds;
lengthAcc += length;
speedHandler.PasteSpeed((int)(lengthAcc / (timeAcc / 1000)));
timeAcc = 0;
lengthAcc = 0;
continue;
}
speedHandler.PasteSpeed((int)(length / (downloadWatchdog.ElapsedMilliseconds / 1000)));
}
}
finally
{
downloadLoadHandler.Dispose();
decompressContext?.Dispose();
compressContext?.Dispose();
}

View File

@@ -5,7 +5,7 @@ namespace Nebula.Shared.Services;
public partial class ContentService
{
public bool CheckMigration(ILoadingHandler loadingHandler)
public bool CheckMigration(ILoadingHandlerFactory loadingHandler)
{
_logger.Log("Checking migration...");
@@ -17,16 +17,13 @@ public partial class ContentService
return true;
}
private void DoMigration(ILoadingHandler loadingHandler, List<string> migrationList)
private void DoMigration(ILoadingHandlerFactory loadingHandler, List<string> migrationList)
{
loadingHandler.SetJobsCount(migrationList.Count);
var mainLoadingHandler = loadingHandler.CreateLoadingContext();
mainLoadingHandler.SetJobsCount(migrationList.Count);
Parallel.ForEach(migrationList, (f,_)=>MigrateFile(f,loadingHandler));
if (loadingHandler is IDisposable disposable)
{
disposable.Dispose();
}
Parallel.ForEach(migrationList, (f,_)=>MigrateFile(f, mainLoadingHandler) );
loadingHandler.Dispose();
}
private void MigrateFile(string file, ILoadingHandler loadingHandler)

View File

@@ -108,11 +108,13 @@ public sealed class EngineService
return info != null;
}
public async Task<AssemblyApi?> EnsureEngine(string version)
public async Task<AssemblyApi?> EnsureEngine(string version, ILoadingHandlerFactory loadingHandlerFactory, CancellationToken cancellationToken = default)
{
_logger.Log("Ensure engine " + version);
using var loadingHandler = loadingHandlerFactory.CreateLoadingContext(new FileLoadingFormater());
loadingHandler.SetLoadingMessage("Ensuring engine " + version);
if (!TryOpen(version)) await DownloadEngine(version);
if (!TryOpen(version)) await DownloadEngine(version, loadingHandler, cancellationToken);
try
{
@@ -128,15 +130,24 @@ public sealed class EngineService
return null;
}
public async Task DownloadEngine(string version)
public async Task DownloadEngine(string version, ILoadingHandler loadingHandler, CancellationToken cancellationToken = default)
{
if (!TryGetVersionInfo(version, out var info))
return;
_logger.Log("Downloading engine version " + version);
loadingHandler.SetLoadingMessage("Downloading engine version " + version);
loadingHandler.Clear();
using var client = new HttpClient();
var s = await client.GetStreamAsync(info.Url);
_engineFileApi.Save(version, s);
var response = await client.GetAsync(info.Url, cancellationToken);
response.EnsureSuccessStatusCode();
loadingHandler.SetJobsCount(response.Content.Headers.ContentLength ?? 0);
await using var streamContent = await response.Content.ReadAsStreamAsync(cancellationToken);
var s = await client.GetStreamAsync(info.Url, cancellationToken);
_engineFileApi.Save(version, s, loadingHandler);
await s.DisposeAsync();
}
@@ -176,7 +187,7 @@ public sealed class EngineService
{
GetEngineInfo(out var modulesInfo, out var engineVersionInfo);
var engineVersionObj = Version.Parse(engineVersion);
var engineVersionObj = Version.Parse(engineVersion.Split("-")[0]);
var module = modulesInfo.Modules[moduleName];
var selectedVersion = module.Versions.Select(kv => new { Version = Version.Parse(kv.Key), kv.Key, kv })
.Where(kv => engineVersionObj >= kv.Version)
@@ -187,15 +198,18 @@ public sealed class EngineService
return selectedVersion.Key;
}
public async Task<AssemblyApi?> EnsureEngineModules(string moduleName, string engineVersion)
public async Task<AssemblyApi?> EnsureEngineModules(string moduleName, ILoadingHandlerFactory loadingHandlerFactory, string engineVersion)
{
var moduleVersion = ResolveModuleVersion(moduleName, engineVersion);
if (!TryGetModuleBuildInfo(moduleName, moduleVersion, out var buildInfo))
return null;
var fileName = ConcatName(moduleName, moduleVersion);
using var loadingHandler = loadingHandlerFactory.CreateLoadingContext(new FileLoadingFormater());
loadingHandler.SetLoadingMessage("Ensuring engine module " + fileName);
if (!TryOpen(fileName)) await DownloadEngineModule(moduleName, moduleVersion);
if (!TryOpen(fileName)) await DownloadEngineModule(moduleName, loadingHandler, moduleVersion);
try
{
@@ -210,19 +224,20 @@ public sealed class EngineService
}
}
public async Task DownloadEngineModule(string moduleName, string moduleVersion)
public async Task DownloadEngineModule(string moduleName, ILoadingHandler loadingHandler, string moduleVersion)
{
if (!TryGetModuleBuildInfo(moduleName, moduleVersion, out var info))
return;
_logger.Log("Downloading engine module version " + moduleVersion);
loadingHandler.SetLoadingMessage("Downloading engine module version " + moduleVersion);
using var client = new HttpClient();
var s = await client.GetStreamAsync(info.Url);
_engineFileApi.Save(ConcatName(moduleName, moduleVersion), s);
_engineFileApi.Save(ConcatName(moduleName, moduleVersion), s, loadingHandler);
await s.DisposeAsync();
}
public string ConcatName(string moduleName, string moduleVersion)
private string ConcatName(string moduleName, string moduleVersion)
{
return moduleName + "" + moduleVersion;
}

View File

@@ -89,14 +89,26 @@ public class FileService
}
}
/// <summary>
/// Factory that produces console-backed loading handlers; holds no
/// resources of its own.
/// </summary>
public sealed class ConsoleLoadingHandlerFactory : ILoadingHandlerFactory
{
    // The formatter hint is ignored: the console handler renders its own output.
    public ILoadingHandler CreateLoadingContext(ILoadingFormater? loadingFormater = null) =>
        new ConsoleLoadingHandler();

    // Nothing to release; Dispose is a no-op required by ILoadingHandlerFactory.
    public void Dispose()
    {
    }
}
public sealed class ConsoleLoadingHandler : ILoadingHandler
{
private int _currJobs;
private long _currJobs;
private float _percent;
private int _resolvedJobs;
private long _resolvedJobs;
public void SetJobsCount(int count)
public void SetJobsCount(long count)
{
_currJobs = count;
@@ -104,12 +116,12 @@ public sealed class ConsoleLoadingHandler : ILoadingHandler
Draw();
}
public int GetJobsCount()
public long GetJobsCount()
{
return _currJobs;
}
public void SetResolvedJobsCount(int count)
public void SetResolvedJobsCount(long count)
{
_resolvedJobs = count;
@@ -117,7 +129,7 @@ public sealed class ConsoleLoadingHandler : ILoadingHandler
Draw();
}
public int GetResolvedJobsCount()
public long GetResolvedJobsCount()
{
return _resolvedJobs;
}
@@ -154,4 +166,9 @@ public sealed class ConsoleLoadingHandler : ILoadingHandler
Console.Write($"\t {_resolvedJobs}/{_currJobs}");
}
public void Dispose()
{
}
}

View File

@@ -29,10 +29,7 @@ public class RestService
[Pure]
public async Task<T> GetAsync<T>(Uri uri, CancellationToken cancellationToken) where T : notnull
{
var httpRequestMessage = new HttpRequestMessage(HttpMethod.Get, uri)
{
Version = HttpVersion.Version10,
};
var httpRequestMessage = new HttpRequestMessage(HttpMethod.Get, uri);
var response = await _client.SendAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false);
return await ReadResult<T>(response, cancellationToken, uri);

View File

@@ -1,138 +0,0 @@
using System.Diagnostics;
namespace Nebula.Shared.Utils;
/// <summary>
/// Stream wrapper that measures transfer throughput over a sliding window of
/// the last <see cref="NumSeconds"/> seconds, using a power-of-two ring of
/// sub-second buckets indexed by elapsed time.
/// </summary>
public sealed class BandwidthStream : Stream
{
    private const int NumSeconds = 8;
    private const int BucketDivisor = 2;
    private const int BucketsPerSecond = 2 << BucketDivisor;

    // TotalBuckets MUST be power of two (the index is masked, not modulo'd)!
    private const int TotalBuckets = NumSeconds * BucketsPerSecond;

    private readonly Stream _baseStream;
    private readonly long[] _buckets;
    private readonly Stopwatch _stopwatch;
    private long _bucketIndex;

    public BandwidthStream(Stream baseStream)
    {
        _stopwatch = Stopwatch.StartNew();
        _baseStream = baseStream;
        _buckets = new long[TotalBuckets];
    }

    public override bool CanRead => _baseStream.CanRead;
    public override bool CanSeek => _baseStream.CanSeek;
    public override bool CanWrite => _baseStream.CanWrite;
    public override long Length => _baseStream.Length;

    public override long Position
    {
        get => _baseStream.Position;
        set => _baseStream.Position = value;
    }

    /// <summary>Records <paramref name="value"/> transferred bytes into the current time bucket.</summary>
    private void TrackBandwidth(long value)
    {
        const int bucketMask = TotalBuckets - 1;

        var bucketIdx = CurBucketIdx();

        // Advance to the new bucket, clearing the stale buckets skipped over.
        if (bucketIdx != _bucketIndex)
        {
            var diff = bucketIdx - _bucketIndex;
            if (diff > TotalBuckets)
            {
                // We managed to skip so much time the whole ring is stale:
                // wipe it all. (BUGFIX: this branch and the loop below were
                // swapped in the original.)
                Array.Clear(_buckets);
            }
            else
            {
                // Clear only the buckets we advance INTO, keeping the data
                // already accumulated in the bucket we are leaving.
                // (BUGFIX: the original cleared [_bucketIndex, bucketIdx),
                // discarding current data and leaving the new bucket stale.)
                for (var i = _bucketIndex + 1; i <= bucketIdx; i++)
                    _buckets[i & bucketMask] = 0;
            }

            _bucketIndex = bucketIdx;
        }

        // Write value.
        _buckets[bucketIdx & bucketMask] += value;
    }

    private long CurBucketIdx()
    {
        // BUGFIX: the bucket index advances BucketsPerSecond times per second,
        // so elapsed seconds must be MULTIPLIED by the rate, not divided.
        var elapsed = _stopwatch.Elapsed.TotalSeconds;
        return (long)(elapsed * BucketsPerSecond);
    }

    /// <summary>
    /// Sum of all buckets scaled down by 2^BucketDivisor.
    /// NOTE(review): the window spans NumSeconds = 8 seconds but this divides
    /// by 4; confirm the intended per-second scaling before relying on it.
    /// </summary>
    public long CalcCurrentAvg()
    {
        var sum = 0L;
        for (var i = 0; i < TotalBuckets; i++) sum += _buckets[i];

        return sum >> BucketDivisor;
    }

    public override void Flush() => _baseStream.Flush();

    public override Task FlushAsync(CancellationToken cancellationToken) =>
        _baseStream.FlushAsync(cancellationToken);

    protected override void Dispose(bool disposing)
    {
        if (disposing)
            _baseStream.Dispose();
    }

    public override ValueTask DisposeAsync() => _baseStream.DisposeAsync();

    public override int Read(byte[] buffer, int offset, int count)
    {
        var read = _baseStream.Read(buffer, offset, count);
        TrackBandwidth(read);
        return read;
    }

    public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
    {
        // BUGFIX: was base.ReadAsync, which funnels through the overridden
        // sync Read(byte[],int,int) and therefore counted every byte twice.
        var read = await _baseStream.ReadAsync(buffer, cancellationToken);
        TrackBandwidth(read);
        return read;
    }

    public override long Seek(long offset, SeekOrigin origin) => _baseStream.Seek(offset, origin);

    public override void SetLength(long value) => _baseStream.SetLength(value);

    public override void Write(byte[] buffer, int offset, int count)
    {
        _baseStream.Write(buffer, offset, count);
        TrackBandwidth(count);
    }

    public override async ValueTask WriteAsync(
        ReadOnlyMemory<byte> buffer,
        CancellationToken cancellationToken = default)
    {
        await _baseStream.WriteAsync(buffer, cancellationToken);
        TrackBandwidth(buffer.Length);
    }
}

View File

@@ -0,0 +1,43 @@
namespace Nebula.Shared.Utils;
/// <summary>
/// Minimal parser that extracts the "modules:" list entries from a content
/// manifest written in a simple YAML-like text format.
/// </summary>
public static class ContentManifestParser
{
    /// <summary>Reads the stream to the end and extracts the modules list.</summary>
    public static List<string> ExtractModules(Stream manifestStream)
    {
        using var reader = new StreamReader(manifestStream);
        return ExtractModules(reader.ReadToEnd());
    }

    /// <summary>
    /// Collects every "- name" entry following a "modules:" line, stopping at
    /// the first non-list line. Returns an empty list when no "modules:"
    /// section exists.
    /// </summary>
    public static List<string> ExtractModules(string manifestContent)
    {
        var modules = new List<string>();
        var lines = manifestContent.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

        var inModulesSection = false;

        foreach (var rawLine in lines)
        {
            var line = rawLine.Trim();

            if (line.StartsWith("modules:", StringComparison.Ordinal))
            {
                inModulesSection = true;
                continue;
            }

            if (!inModulesSection)
                continue;

            if (line.StartsWith("- ", StringComparison.Ordinal))
            {
                modules.Add(line.Substring(2).Trim());
            }
            else
            {
                // BUGFIX: the original guard `!line.StartsWith(" ")` was always
                // true because `line` is trimmed; any non-list line ends the
                // section, which is what actually happened and what we keep.
                break;
            }
        }

        return modules;
    }
}

View File

@@ -1,21 +1,37 @@
using System.Buffers;
using Nebula.Shared.Models;
namespace Nebula.Shared.Utils;
public static class StreamHelper
{
public static async ValueTask<byte[]> ReadExactAsync(this Stream stream, int amount, CancellationToken? cancel)
/// <summary>
/// Copies <paramref name="input"/> to <paramref name="output"/>, reporting
/// each written chunk to <paramref name="loadingHandler"/> as resolved bytes.
/// </summary>
public static void CopyTo(this Stream input, Stream output, ILoadingHandler loadingHandler)
{
    const int bufferSize = 81920;

    // Rent instead of allocating 80 KB per call: repeated saves would
    // otherwise churn the large-object heap. (System.Buffers is already
    // imported at the top of this file.)
    var buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        int bytesRead;
        while ((bytesRead = input.Read(buffer, 0, buffer.Length)) > 0)
        {
            output.Write(buffer, 0, bytesRead);
            loadingHandler.AppendResolvedJob(bytesRead);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}
/// <summary>
/// Reads exactly <paramref name="amount"/> bytes from the stream into a
/// freshly allocated array, throwing on premature end of stream.
/// </summary>
public static async ValueTask<byte[]> ReadExactAsync(this Stream stream, int amount, CancellationToken cancel = default)
{
    var buffer = new byte[amount];
    await stream.ReadExactAsync(buffer, cancel);
    return buffer;
}
public static async ValueTask ReadExactAsync(this Stream stream, Memory<byte> into, CancellationToken? cancel)
public static async ValueTask ReadExactAsync(this Stream stream, Memory<byte> into, CancellationToken cancel = default, ILoadingHandler? loadingHandler = null)
{
while (into.Length > 0)
{
var read = await stream.ReadAsync(into);
var read = await stream.ReadAsync(into, cancel);
loadingHandler?.AppendResolvedJob(read);
// Check EOF.
if (read == 0)
@@ -24,31 +40,4 @@ public static class StreamHelper
into = into[read..];
}
}
/// <summary>
/// Copies exactly <paramref name="amount"/> bytes from <paramref name="stream"/>
/// to <paramref name="to"/> using a pooled buffer of <paramref name="bufferSize"/>.
/// Throws <see cref="EndOfStreamException"/> if the source ends early.
/// </summary>
public static async Task CopyAmountToAsync(
    this Stream stream,
    Stream to,
    int amount,
    int bufferSize,
    CancellationToken cancel)
{
    var buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        while (amount > 0)
        {
            Memory<byte> readInto = buffer;
            if (amount < readInto.Length)
                readInto = readInto[..amount];

            var read = await stream.ReadAsync(readInto, cancel);
            if (read == 0)
                throw new EndOfStreamException();

            amount -= read;
            readInto = readInto[..read];
            await to.WriteAsync(readInto, cancel);
        }
    }
    finally
    {
        // BUGFIX: the rented buffer was never returned to the pool (leak).
        ArrayPool<byte>.Shared.Return(buffer);
    }
}
}