Switch to the Google.Protobuf source-code version and add a MergeFromEx extension method that takes an extra CodedInputStream parameter, so the library no longer creates one on every call; a single instance is reused (with a proper reset) to reduce GC pressure

This commit is contained in:
sin365 2025-11-06 15:30:17 +08:00
parent a4b39f36bb
commit ec94778778
219 changed files with 46529 additions and 33 deletions
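The sketch below illustrates the reuse pattern the commit message describes: a caller-owned CodedInputStream is re-pointed at each incoming buffer via the internal Reset added in this commit, instead of allocating a new stream per parse. This is a minimal sketch only; the extension class name and the exact MergeFromEx signature are assumptions (the commit names only the method), and it presumes the extension is compiled inside the vendored Google.Protobuf sources so the internal Reset and CheckReadEndOfStreamTag members are reachable.

namespace Google.Protobuf
{
    // Hypothetical extension class; only the MergeFromEx name comes from the commit message.
    public static class MessageExtensionsEx
    {
        // Parses the given byte-array slice into 'message' through a caller-owned CodedInputStream,
        // avoiding the per-call stream allocation that MergeFrom(byte[]) incurs.
        public static void MergeFromEx(this IMessage message, byte[] data, int offset, int length,
            CodedInputStream reusable)
        {
            // Re-point the pooled stream at the new buffer and clear its parser state.
            reusable.Reset(data, offset, length, discardUnknownFields: false, registry: null);
            reusable.ReadRawMessage(message);
            reusable.CheckReadEndOfStreamTag();
        }
    }
}

Typical usage keeps one instance alive (created with the new parameterless constructor) and reuses it for every parse, e.g. var cis = new CodedInputStream(); ... msg.MergeFromEx(payload, 0, payload.Length, cis); where msg is any generated IMessage and payload the received bytes (names here are illustrative only).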

View File

@@ -1,33 +0,0 @@
fileFormatVersion: 2
guid: 2c61c2e567bd4e146b4c09946e815a55
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
Any:
second:
enabled: 1
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
DefaultValueInitialized: true
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 0
settings:
CPU: AnyCPU
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4a8c636c0276a9740b79c406e75975e0
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,56 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
/// <summary>
/// Provides a utility routine to copy small arrays much more quickly than Buffer.BlockCopy
/// </summary>
internal static class ByteArray
{
/// <summary>
/// The threshold above which you should use Buffer.BlockCopy rather than ByteArray.Copy
/// </summary>
private const int CopyThreshold = 12;
/// <summary>
/// Determines which copy routine to use based on the number of bytes to be copied.
/// </summary>
internal static void Copy(byte[] src, int srcOffset, byte[] dst, int dstOffset, int count)
{
if (count > CopyThreshold)
{
Buffer.BlockCopy(src, srcOffset, dst, dstOffset, count);
}
else
{
int stop = srcOffset + count;
for (int i = srcOffset; i < stop; i++)
{
dst[dstOffset++] = src[i];
}
}
}
/// <summary>
/// Reverses the order of bytes in the array
/// </summary>
internal static void Reverse(byte[] bytes)
{
for (int first = 0, last = bytes.Length - 1; first < last; first++, last--)
{
byte temp = bytes[first];
bytes[first] = bytes[last];
bytes[last] = temp;
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 572496ae08a6d1348a4f88c22abdaa12
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,424 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace Google.Protobuf
{
/// <summary>
/// Immutable array of bytes.
/// </summary>
[SecuritySafeCritical]
[DebuggerDisplay("Length = {Length}")]
[DebuggerTypeProxy(typeof(ByteStringDebugView))]
public sealed class ByteString : IEnumerable<byte>, IEquatable<ByteString>
{
private static readonly ByteString empty = new ByteString(new byte[0]);
private readonly ReadOnlyMemory<byte> bytes;
/// <summary>
/// Internal use only. Ensure that the provided memory is not mutated and belongs to this instance.
/// </summary>
internal static ByteString AttachBytes(ReadOnlyMemory<byte> bytes)
{
return new ByteString(bytes);
}
/// <summary>
/// Internal use only. Ensure that the provided memory is not mutated and belongs to this instance.
/// This method encapsulates converting array to memory. Reduces need for SecuritySafeCritical
/// in .NET Framework.
/// </summary>
internal static ByteString AttachBytes(byte[] bytes)
{
return AttachBytes(bytes.AsMemory());
}
/// <summary>
/// Constructs a new ByteString from the given memory. The memory is
/// *not* copied, and must not be modified after this constructor is called.
/// </summary>
private ByteString(ReadOnlyMemory<byte> bytes)
{
this.bytes = bytes;
}
/// <summary>
/// Returns an empty ByteString.
/// </summary>
public static ByteString Empty
{
get { return empty; }
}
/// <summary>
/// Returns the length of this ByteString in bytes.
/// </summary>
public int Length
{
get { return bytes.Length; }
}
/// <summary>
/// Returns <c>true</c> if this byte string is empty, <c>false</c> otherwise.
/// </summary>
public bool IsEmpty
{
get { return Length == 0; }
}
/// <summary>
/// Provides read-only access to the data of this <see cref="ByteString"/>.
/// No data is copied so this is the most efficient way of accessing.
/// </summary>
public ReadOnlySpan<byte> Span
{
get { return bytes.Span; }
}
/// <summary>
/// Provides read-only access to the data of this <see cref="ByteString"/>.
/// No data is copied so this is the most efficient way of accessing.
/// </summary>
public ReadOnlyMemory<byte> Memory
{
get { return bytes; }
}
/// <summary>
/// Converts this <see cref="ByteString"/> into a byte array.
/// </summary>
/// <remarks>The data is copied - changes to the returned array will not be reflected in this <c>ByteString</c>.</remarks>
/// <returns>A byte array with the same data as this <c>ByteString</c>.</returns>
public byte[] ToByteArray()
{
return bytes.ToArray();
}
/// <summary>
/// Converts this <see cref="ByteString"/> into a standard base64 representation.
/// </summary>
/// <returns>A base64 representation of this <c>ByteString</c>.</returns>
public string ToBase64()
{
#if NET5_0_OR_GREATER
return Convert.ToBase64String(bytes.Span);
#else
if (MemoryMarshal.TryGetArray(bytes, out ArraySegment<byte> segment))
{
// Fast path. ByteString was created with an array, so pass the underlying array.
return Convert.ToBase64String(segment.Array, segment.Offset, segment.Count);
}
else
{
// Slow path. ByteString is not an array. Convert memory and pass result to ToBase64String.
return Convert.ToBase64String(bytes.ToArray());
}
#endif
}
/// <summary>
/// Constructs a <see cref="ByteString" /> from the Base64 Encoded String.
/// </summary>
public static ByteString FromBase64(string bytes)
{
// By handling the empty string explicitly, we not only optimize but we fix a
// problem on CF 2.0. See issue 61 for details.
return bytes == "" ? Empty : new ByteString(Convert.FromBase64String(bytes));
}
/// <summary>
/// Constructs a <see cref="ByteString"/> from data in the given stream, synchronously.
/// </summary>
/// <remarks>If successful, <paramref name="stream"/> will be read completely, from the position
/// at the start of the call.</remarks>
/// <param name="stream">The stream to copy into a ByteString.</param>
/// <returns>A ByteString with content read from the given stream.</returns>
public static ByteString FromStream(Stream stream)
{
ProtoPreconditions.CheckNotNull(stream, nameof(stream));
int capacity = stream.CanSeek ? checked((int) (stream.Length - stream.Position)) : 0;
var memoryStream = new MemoryStream(capacity);
stream.CopyTo(memoryStream);
#if NETSTANDARD1_1 || NETSTANDARD2_0
byte[] bytes = memoryStream.ToArray();
#else
// Avoid an extra copy if we can.
byte[] bytes = memoryStream.Length == memoryStream.Capacity ? memoryStream.GetBuffer() : memoryStream.ToArray();
#endif
return AttachBytes(bytes);
}
/// <summary>
/// Constructs a <see cref="ByteString"/> from data in the given stream, asynchronously.
/// </summary>
/// <remarks>If successful, <paramref name="stream"/> will be read completely, from the position
/// at the start of the call.</remarks>
/// <param name="stream">The stream to copy into a ByteString.</param>
/// <param name="cancellationToken">The cancellation token to use when reading from the stream, if any.</param>
/// <returns>A ByteString with content read from the given stream.</returns>
public static Task<ByteString> FromStreamAsync(Stream stream, CancellationToken cancellationToken = default)
{
ProtoPreconditions.CheckNotNull(stream, nameof(stream));
return ByteStringAsync.FromStreamAsyncCore(stream, cancellationToken);
}
/// <summary>
/// Constructs a <see cref="ByteString" /> from the given array. The contents
/// are copied, so further modifications to the array will not
/// be reflected in the returned ByteString.
/// This method can also be invoked in <c>ByteString.CopyFrom(0xaa, 0xbb, ...)</c> form
/// which is primarily useful for testing.
/// </summary>
public static ByteString CopyFrom(params byte[] bytes)
{
return new ByteString((byte[]) bytes.Clone());
}
/// <summary>
/// Constructs a <see cref="ByteString" /> from a portion of a byte array.
/// </summary>
public static ByteString CopyFrom(byte[] bytes, int offset, int count)
{
byte[] portion = new byte[count];
ByteArray.Copy(bytes, offset, portion, 0, count);
return new ByteString(portion);
}
/// <summary>
/// Constructs a <see cref="ByteString" /> from a read only span. The contents
/// are copied, so further modifications to the span will not
/// be reflected in the returned <see cref="ByteString" />.
/// </summary>
public static ByteString CopyFrom(ReadOnlySpan<byte> bytes)
{
return new ByteString(bytes.ToArray());
}
/// <summary>
/// Creates a new <see cref="ByteString" /> by encoding the specified text with
/// the given encoding.
/// </summary>
public static ByteString CopyFrom(string text, Encoding encoding)
{
return new ByteString(encoding.GetBytes(text));
}
/// <summary>
/// Creates a new <see cref="ByteString" /> by encoding the specified text in UTF-8.
/// </summary>
public static ByteString CopyFromUtf8(string text)
{
return CopyFrom(text, Encoding.UTF8);
}
/// <summary>
/// Returns the byte at the given index.
/// </summary>
public byte this[int index]
{
get { return bytes.Span[index]; }
}
/// <summary>
/// Converts this <see cref="ByteString"/> into a string by applying the given encoding.
/// </summary>
/// <remarks>
/// This method should only be used to convert binary data which was the result of encoding
/// text with the given encoding.
/// </remarks>
/// <param name="encoding">The encoding to use to decode the binary data into text.</param>
/// <returns>The result of decoding the binary data with the given encoding.</returns>
public string ToString(Encoding encoding)
{
if (MemoryMarshal.TryGetArray(bytes, out ArraySegment<byte> segment))
{
// Fast path. ByteString was created with an array.
return encoding.GetString(segment.Array, segment.Offset, segment.Count);
}
else
{
// Slow path. ByteString is not an array. Convert memory and pass result to GetString.
// TODO: Consider using GetString overload that takes a pointer.
byte[] array = bytes.ToArray();
return encoding.GetString(array, 0, array.Length);
}
}
/// <summary>
/// Converts this <see cref="ByteString"/> into a string by applying the UTF-8 encoding.
/// </summary>
/// <remarks>
/// This method should only be used to convert binary data which was the result of encoding
/// text with UTF-8.
/// </remarks>
/// <returns>The result of decoding the binary data with the UTF-8 encoding.</returns>
public string ToStringUtf8()
{
return ToString(Encoding.UTF8);
}
/// <summary>
/// Returns an iterator over the bytes in this <see cref="ByteString"/>.
/// </summary>
/// <returns>An iterator over the bytes in this object.</returns>
[SecuritySafeCritical]
public IEnumerator<byte> GetEnumerator()
{
return MemoryMarshal.ToEnumerable(bytes).GetEnumerator();
}
/// <summary>
/// Returns an iterator over the bytes in this <see cref="ByteString"/>.
/// </summary>
/// <returns>An iterator over the bytes in this object.</returns>
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
/// <summary>
/// Creates a CodedInputStream from this ByteString's data.
/// </summary>
public CodedInputStream CreateCodedInput()
{
// We trust CodedInputStream not to reveal the provided byte array or modify it
if (MemoryMarshal.TryGetArray(bytes, out ArraySegment<byte> segment) && segment.Count == bytes.Length)
{
// Fast path. ByteString was created with a complete array.
return new CodedInputStream(segment.Array, segment.Offset, segment.Count);
}
else
{
// Slow path. ByteString is not an array, or is a slice of an array.
// Convert memory and pass the resulting array to the CodedInputStream constructor.
return new CodedInputStream(bytes.ToArray());
}
}
/// <summary>
/// Compares two byte strings for equality.
/// </summary>
/// <param name="lhs">The first byte string to compare.</param>
/// <param name="rhs">The second byte string to compare.</param>
/// <returns><c>true</c> if the byte strings are equal; false otherwise.</returns>
public static bool operator ==(ByteString lhs, ByteString rhs)
{
if (ReferenceEquals(lhs, rhs))
{
return true;
}
if (lhs is null || rhs is null)
{
return false;
}
return lhs.bytes.Span.SequenceEqual(rhs.bytes.Span);
}
/// <summary>
/// Compares two byte strings for inequality.
/// </summary>
/// <param name="lhs">The first byte string to compare.</param>
/// <param name="rhs">The second byte string to compare.</param>
/// <returns><c>false</c> if the byte strings are equal; true otherwise.</returns>
public static bool operator !=(ByteString lhs, ByteString rhs)
{
return !(lhs == rhs);
}
/// <summary>
/// Compares this byte string with another object.
/// </summary>
/// <param name="obj">The object to compare this with.</param>
/// <returns><c>true</c> if <paramref name="obj"/> refers to an equal <see cref="ByteString"/>; <c>false</c> otherwise.</returns>
[SecuritySafeCritical]
public override bool Equals(object obj)
{
return this == (obj as ByteString);
}
/// <summary>
/// Returns a hash code for this object. Two equal byte strings
/// will return the same hash code.
/// </summary>
/// <returns>A hash code for this object.</returns>
[SecuritySafeCritical]
public override int GetHashCode()
{
ReadOnlySpan<byte> b = bytes.Span;
int ret = 23;
for (int i = 0; i < b.Length; i++)
{
ret = (ret * 31) + b[i];
}
return ret;
}
/// <summary>
/// Compares this byte string with another.
/// </summary>
/// <param name="other">The <see cref="ByteString"/> to compare this with.</param>
/// <returns><c>true</c> if <paramref name="other"/> refers to an equal byte string; <c>false</c> otherwise.</returns>
public bool Equals(ByteString other)
{
return this == other;
}
/// <summary>
/// Copies the entire byte array to the destination array provided at the offset specified.
/// </summary>
public void CopyTo(byte[] array, int position)
{
bytes.CopyTo(array.AsMemory(position));
}
/// <summary>
/// Writes the entire byte array to the provided stream
/// </summary>
public void WriteTo(Stream outputStream)
{
if (MemoryMarshal.TryGetArray(bytes, out ArraySegment<byte> segment))
{
// Fast path. ByteString was created with an array, so pass the underlying array.
outputStream.Write(segment.Array, segment.Offset, segment.Count);
}
else
{
// Slow path. ByteString is not an array. Convert memory and write the resulting array to the stream.
var array = bytes.ToArray();
outputStream.Write(array, 0, array.Length);
}
}
private sealed class ByteStringDebugView
{
private readonly ByteString data;
public ByteStringDebugView(ByteString data)
{
this.data = data;
}
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public byte[] Items => data.bytes.ToArray();
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3bb4b452d5f4add4695a43f8802bf056
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,39 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace Google.Protobuf
{
/// <summary>
/// SecuritySafeCritical attribute can not be placed on types with async methods.
/// This class has ByteString's async methods so it can be marked with SecuritySafeCritical.
/// </summary>
internal static class ByteStringAsync
{
internal static async Task<ByteString> FromStreamAsyncCore(Stream stream, CancellationToken cancellationToken)
{
int capacity = stream.CanSeek ? checked((int)(stream.Length - stream.Position)) : 0;
var memoryStream = new MemoryStream(capacity);
// We have to specify the buffer size here, as there's no overload accepting the cancellation token
// alone. But it's documented to use 81920 by default if not specified.
await stream.CopyToAsync(memoryStream, 81920, cancellationToken);
#if NETSTANDARD1_1
byte[] bytes = memoryStream.ToArray();
#else
// Avoid an extra copy if we can.
byte[] bytes = memoryStream.Length == memoryStream.Capacity ? memoryStream.GetBuffer() : memoryStream.ToArray();
#endif
return ByteString.AttachBytes(bytes);
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: af20324293fdab64597431aa3863202e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,686 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Collections;
using System;
using System.IO;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Reads and decodes protocol message fields.
/// </summary>
/// <remarks>
/// <para>
/// This class is generally used by generated code to read appropriate
/// primitives from the stream. It effectively encapsulates the lowest
/// levels of protocol buffer format.
/// </para>
/// <para>
/// Repeated fields and map fields are not handled by this class; use <see cref="RepeatedField{T}"/>
/// and <see cref="MapField{TKey, TValue}"/> to serialize such fields.
/// </para>
/// </remarks>
[SecuritySafeCritical]
public sealed class CodedInputStream : IDisposable
{
/// <summary>
/// Whether to leave the underlying stream open when disposing of this stream.
/// This is always true when there's no stream.
/// </summary>
private bool leaveOpen;
/// <summary>
/// Buffer of data read from the stream or provided at construction time.
/// </summary>
private byte[] buffer;
/// <summary>
/// The stream to read further input from, or null if the byte array buffer was provided
/// directly on construction, with no further data available.
/// </summary>
private readonly Stream input;
/// <summary>
/// The parser state is kept separately so that other parse implementations can reuse the same
/// parsing primitives.
/// </summary>
private ParserInternalState state;
internal const int DefaultRecursionLimit = 100;
internal const int DefaultSizeLimit = Int32.MaxValue;
internal const int BufferSize = 4096;
#region Construction
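/// <summary>
/// Creates an uninitialized instance intended for reuse: no buffer or stream is attached,
/// so Reset(...) must be called to supply a buffer before any read. This is the allocation-free
/// path this commit adds for MergeFromEx.
/// </summary>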
public CodedInputStream()
{
}
// Note that the checks are performed such that we don't end up checking obviously-valid things
// like non-null references for arrays we've just created.
/// <summary>
/// Creates a new CodedInputStream reading data from the given byte array.
/// </summary>
public CodedInputStream(byte[] buffer) : this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), 0, buffer.Length, true)
{
}
/// <summary>
/// Creates a new <see cref="CodedInputStream"/> that reads from the given byte array slice.
/// </summary>
public CodedInputStream(byte[] buffer, int offset, int length)
: this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), offset, offset + length, true)
{
if (offset < 0 || offset > buffer.Length)
{
throw new ArgumentOutOfRangeException("offset", "Offset must be within the buffer");
}
if (length < 0 || offset + length > buffer.Length)
{
throw new ArgumentOutOfRangeException("length", "Length must be non-negative and within the buffer");
}
}
/// <summary>
/// Creates a new <see cref="CodedInputStream"/> reading data from the given stream, which will be disposed
/// when the returned object is disposed.
/// </summary>
/// <param name="input">The stream to read from.</param>
public CodedInputStream(Stream input) : this(input, false)
{
}
/// <summary>
/// Creates a new <see cref="CodedInputStream"/> reading data from the given stream.
/// </summary>
/// <param name="input">The stream to read from.</param>
/// <param name="leaveOpen"><c>true</c> to leave <paramref name="input"/> open when the returned
/// <c cref="CodedInputStream"/> is disposed; <c>false</c> to dispose of the given stream when the
/// returned object is disposed.</param>
public CodedInputStream(Stream input, bool leaveOpen)
: this(ProtoPreconditions.CheckNotNull(input, "input"), new byte[BufferSize], 0, 0, leaveOpen)
{
}
/// <summary>
/// Creates a new CodedInputStream reading data from the given
/// stream and buffer, using the default limits.
/// </summary>
internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize, bool leaveOpen)
{
this.input = input;
this.buffer = buffer;
this.state.bufferPos = bufferPos;
this.state.bufferSize = bufferSize;
this.state.sizeLimit = DefaultSizeLimit;
this.state.recursionLimit = DefaultRecursionLimit;
SegmentedBufferHelper.Initialize(this, out this.state.segmentedBufferHelper);
this.leaveOpen = leaveOpen;
this.state.currentLimit = int.MaxValue;
}
/// <summary>
/// Creates a new CodedInputStream reading data from the given
/// stream and buffer, using the specified limits.
/// </summary>
/// <remarks>
/// This chains to the version with the default limits instead of vice versa to avoid
/// having to check that the default values are valid every time.
/// </remarks>
internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize, int sizeLimit, int recursionLimit, bool leaveOpen)
: this(input, buffer, bufferPos, bufferSize, leaveOpen)
{
if (sizeLimit <= 0)
{
throw new ArgumentOutOfRangeException("sizeLimit", "Size limit must be positive");
}
if (recursionLimit <= 0)
{
throw new ArgumentOutOfRangeException("recursionLimit!", "Recursion limit must be positive");
}
this.state.sizeLimit = sizeLimit;
this.state.recursionLimit = recursionLimit;
}
#endregion
/// <summary>
/// Creates a <see cref="CodedInputStream"/> with the specified size and recursion limits, reading
/// from an input stream.
/// </summary>
/// <remarks>
/// This method exists separately from the constructor to reduce the number of constructor overloads.
/// It is likely to be used considerably less frequently than the constructors, as the default limits
/// are suitable for most use cases.
/// </remarks>
/// <param name="input">The input stream to read from</param>
/// <param name="sizeLimit">The total limit of data to read from the stream.</param>
/// <param name="recursionLimit">The maximum recursion depth to allow while reading.</param>
/// <returns>A <c>CodedInputStream</c> reading from <paramref name="input"/> with the specified size
/// and recursion limits.</returns>
public static CodedInputStream CreateWithLimits(Stream input, int sizeLimit, int recursionLimit)
{
// Note: we may want an overload accepting leaveOpen
return new CodedInputStream(input, new byte[BufferSize], 0, 0, sizeLimit, recursionLimit, false);
}
/// <summary>
/// Returns the current position in the input stream, or the position in the input buffer
/// </summary>
public long Position
{
get
{
if (input != null)
{
return input.Position - ((state.bufferSize + state.bufferSizeAfterLimit) - state.bufferPos);
}
return state.bufferPos;
}
}
/// <summary>
/// Returns the last tag read, or 0 if no tags have been read or we've read beyond
/// the end of the stream.
/// </summary>
internal uint LastTag { get { return state.lastTag; } }
/// <summary>
/// Returns the size limit for this stream.
/// </summary>
/// <remarks>
/// This limit is applied when reading from the underlying stream, as a sanity check. It is
/// not applied when reading from a byte array data source without an underlying stream.
/// The default value is Int32.MaxValue.
/// </remarks>
/// <value>
/// The size limit.
/// </value>
public int SizeLimit { get { return state.sizeLimit; } }
/// <summary>
/// Returns the recursion limit for this stream. This limit is applied whilst reading messages,
/// to avoid maliciously-recursive data.
/// </summary>
/// <remarks>
/// The default limit is 100.
/// </remarks>
/// <value>
/// The recursion limit for this stream.
/// </value>
public int RecursionLimit { get { return state.recursionLimit; } }
/// <summary>
/// Internal-only property; when set to true, unknown fields will be discarded while parsing.
/// </summary>
internal bool DiscardUnknownFields
{
get { return state.DiscardUnknownFields; }
set { state.DiscardUnknownFields = value; }
}
/// <summary>
/// Internal-only property; provides extension identifiers to compatible messages while parsing.
/// </summary>
internal ExtensionRegistry ExtensionRegistry
{
get { return state.ExtensionRegistry; }
set { state.ExtensionRegistry = value; }
}
internal byte[] InternalBuffer => buffer;
internal Stream InternalInputStream => input;
internal ref ParserInternalState InternalState => ref state;
/// <summary>
/// Disposes of this instance, potentially closing any underlying stream.
/// </summary>
/// <remarks>
/// As there is no flushing to perform here, disposing of a <see cref="CodedInputStream"/> which
/// was constructed with the <c>leaveOpen</c> option parameter set to <c>true</c> (or one which
/// was constructed to read from a byte array) has no effect.
/// </remarks>
public void Dispose()
{
if (!leaveOpen)
{
input.Dispose();
}
}
#region Validation
/// <summary>
/// Verifies that the last call to ReadTag() returned tag 0 - in other words,
/// we've reached the end of the stream when we expected to.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The
/// tag read was not the one specified</exception>
internal void CheckReadEndOfStreamTag()
{
ParsingPrimitivesMessages.CheckReadEndOfStreamTag(ref state);
}
#endregion
#region Reading of tags etc
/// <summary>
/// Peeks at the next field tag. This is like calling <see cref="ReadTag"/>, but the
/// tag is not consumed. (So a subsequent call to <see cref="ReadTag"/> will return the
/// same value.)
/// </summary>
public uint PeekTag()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.PeekTag(ref span, ref state);
}
/// <summary>
/// Reads a field tag, returning the tag of 0 for "end of stream".
/// </summary>
/// <remarks>
/// If this method returns 0, it doesn't necessarily mean the end of all
/// the data in this CodedInputStream; it may be the end of the logical stream
/// for an embedded message, for example.
/// </remarks>
/// <returns>The next field tag, or 0 for end of stream. (0 is never a valid tag.)</returns>
public uint ReadTag()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseTag(ref span, ref state);
}
/// <summary>
/// Skips the data for the field with the tag we've just read.
/// This should be called directly after <see cref="ReadTag"/>, when
/// the caller wishes to skip an unknown field.
/// </summary>
/// <remarks>
/// This method throws <see cref="InvalidProtocolBufferException"/> if the last-read tag was an end-group tag.
/// If a caller wishes to skip a group, they should skip the whole group, by calling this method after reading the
/// start-group tag. This behavior allows callers to call this method on any field they don't understand, correctly
/// resulting in an error if an end-group tag has not been paired with an earlier start-group tag.
/// </remarks>
/// <exception cref="InvalidProtocolBufferException">The last tag was an end-group tag</exception>
/// <exception cref="InvalidOperationException">The last read operation read to the end of the logical stream</exception>
public void SkipLastField()
{
var span = new ReadOnlySpan<byte>(buffer);
ParsingPrimitivesMessages.SkipLastField(ref span, ref state);
}
/// <summary>
/// Skip a group.
/// </summary>
internal void SkipGroup(uint startGroupTag)
{
var span = new ReadOnlySpan<byte>(buffer);
ParsingPrimitivesMessages.SkipGroup(ref span, ref state, startGroupTag);
}
/// <summary>
/// Reads a double field from the stream.
/// </summary>
public double ReadDouble()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseDouble(ref span, ref state);
}
/// <summary>
/// Reads a float field from the stream.
/// </summary>
public float ReadFloat()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseFloat(ref span, ref state);
}
/// <summary>
/// Reads a uint64 field from the stream.
/// </summary>
public ulong ReadUInt64()
{
return ReadRawVarint64();
}
/// <summary>
/// Reads an int64 field from the stream.
/// </summary>
public long ReadInt64()
{
return (long) ReadRawVarint64();
}
/// <summary>
/// Reads an int32 field from the stream.
/// </summary>
public int ReadInt32()
{
return (int) ReadRawVarint32();
}
/// <summary>
/// Reads a fixed64 field from the stream.
/// </summary>
public ulong ReadFixed64()
{
return ReadRawLittleEndian64();
}
/// <summary>
/// Reads a fixed32 field from the stream.
/// </summary>
public uint ReadFixed32()
{
return ReadRawLittleEndian32();
}
/// <summary>
/// Reads a bool field from the stream.
/// </summary>
public bool ReadBool()
{
return ReadRawVarint64() != 0;
}
/// <summary>
/// Reads a string field from the stream.
/// </summary>
public string ReadString()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ReadString(ref span, ref state);
}
/// <summary>
/// Reads an embedded message field value from the stream.
/// </summary>
public void ReadMessage(IMessage builder)
{
// TODO: if the message doesn't implement IBufferMessage (and thus does not provide the InternalMergeFrom method),
// what we're doing here works fine, but could be more efficient.
// What happens is that we first initialize a ParseContext from the current coded input stream only to parse the length of the message, at which point
// we will need to switch back again to CodedInputStream-based parsing (which involves copying and storing the state) to be able to
// invoke the legacy MergeFrom(CodedInputStream) method.
// For now, this inefficiency is fine, considering this is only a backward-compatibility scenario (and regenerating the code fixes it).
ParseContext.Initialize(buffer.AsSpan(), ref state, out ParseContext ctx);
try
{
ParsingPrimitivesMessages.ReadMessage(ref ctx, builder);
}
finally
{
ctx.CopyStateTo(this);
}
}
/// <summary>
/// Reads an embedded group field from the stream.
/// </summary>
public void ReadGroup(IMessage builder)
{
ParseContext.Initialize(this, out ParseContext ctx);
try
{
ParsingPrimitivesMessages.ReadGroup(ref ctx, builder);
}
finally
{
ctx.CopyStateTo(this);
}
}
/// <summary>
/// Reads a bytes field value from the stream.
/// </summary>
public ByteString ReadBytes()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ReadBytes(ref span, ref state);
}
/// <summary>
/// Reads a uint32 field value from the stream.
/// </summary>
public uint ReadUInt32()
{
return ReadRawVarint32();
}
/// <summary>
/// Reads an enum field value from the stream.
/// </summary>
public int ReadEnum()
{
// Currently just a pass-through, but it's nice to separate it logically from WriteInt32.
return (int) ReadRawVarint32();
}
/// <summary>
/// Reads an sfixed32 field value from the stream.
/// </summary>
public int ReadSFixed32()
{
return (int) ReadRawLittleEndian32();
}
/// <summary>
/// Reads an sfixed64 field value from the stream.
/// </summary>
public long ReadSFixed64()
{
return (long) ReadRawLittleEndian64();
}
/// <summary>
/// Reads an sint32 field value from the stream.
/// </summary>
public int ReadSInt32()
{
return ParsingPrimitives.DecodeZigZag32(ReadRawVarint32());
}
/// <summary>
/// Reads an sint64 field value from the stream.
/// </summary>
public long ReadSInt64()
{
return ParsingPrimitives.DecodeZigZag64(ReadRawVarint64());
}
/// <summary>
/// Reads a length for length-delimited data.
/// </summary>
/// <remarks>
/// This is internally just reading a varint, but this method exists
/// to make the calling code clearer.
/// </remarks>
public int ReadLength()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseLength(ref span, ref state);
}
/// <summary>
/// Peeks at the next tag in the stream. If it matches <paramref name="tag"/>,
/// the tag is consumed and the method returns <c>true</c>; otherwise, the
/// stream is left in the original position and the method returns <c>false</c>.
/// </summary>
public bool MaybeConsumeTag(uint tag)
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.MaybeConsumeTag(ref span, ref state, tag);
}
#endregion
#region Underlying reading primitives
/// <summary>
/// Reads a raw Varint from the stream. If larger than 32 bits, discard the upper bits.
/// This method is optimised for the case where we've got lots of data in the buffer.
/// That means we can check the size just once, then just read directly from the buffer
/// without constant rechecking of the buffer length.
/// </summary>
internal uint ReadRawVarint32()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseRawVarint32(ref span, ref state);
}
/// <summary>
/// Reads a varint from the input one byte at a time, so that it does not
/// read any bytes after the end of the varint. If you simply wrapped the
/// stream in a CodedInputStream and used ReadRawVarint32(Stream)
/// then you would probably end up reading past the end of the varint since
/// CodedInputStream buffers its input.
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
internal static uint ReadRawVarint32(Stream input)
{
return ParsingPrimitives.ReadRawVarint32(input);
}
/// <summary>
/// Reads a raw varint from the stream.
/// </summary>
internal ulong ReadRawVarint64()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseRawVarint64(ref span, ref state);
}
/// <summary>
/// Reads a 32-bit little-endian integer from the stream.
/// </summary>
internal uint ReadRawLittleEndian32()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseRawLittleEndian32(ref span, ref state);
}
/// <summary>
/// Reads a 64-bit little-endian integer from the stream.
/// </summary>
internal ulong ReadRawLittleEndian64()
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ParseRawLittleEndian64(ref span, ref state);
}
#endregion
#region Internal reading and buffer management
/// <summary>
/// Sets currentLimit to (current position) + byteLimit. This is called
/// when descending into a length-delimited embedded message. The previous
/// limit is returned.
/// </summary>
/// <returns>The old limit.</returns>
internal int PushLimit(int byteLimit)
{
return SegmentedBufferHelper.PushLimit(ref state, byteLimit);
}
/// <summary>
/// Discards the current limit, returning the previous limit.
/// </summary>
internal void PopLimit(int oldLimit)
{
SegmentedBufferHelper.PopLimit(ref state, oldLimit);
}
/// <summary>
/// Returns whether or not all the data before the limit has been read.
/// </summary>
/// <returns></returns>
internal bool ReachedLimit
{
get
{
return SegmentedBufferHelper.IsReachedLimit(ref state);
}
}
/// <summary>
/// Returns true if the stream has reached the end of the input. This is the
/// case if either the end of the underlying input source has been reached or
/// the stream has reached a limit created using PushLimit.
/// </summary>
public bool IsAtEnd
{
get
{
var span = new ReadOnlySpan<byte>(buffer);
return SegmentedBufferHelper.IsAtEnd(ref span, ref state);
}
}
/// <summary>
/// Reads a fixed size of bytes from the input.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">
/// the end of the stream or the current limit was reached
/// </exception>
internal byte[] ReadRawBytes(int size)
{
var span = new ReadOnlySpan<byte>(buffer);
return ParsingPrimitives.ReadRawBytes(ref span, ref state, size);
}
/// <summary>
/// Reads a top-level message or a nested message after the limits for this message have been pushed.
/// (parser will proceed until the end of the current limit)
/// NOTE: this method needs to be public because it's invoked by the generated code - e.g. msg.MergeFrom(CodedInputStream input) method
/// </summary>
public void ReadRawMessage(IMessage message)
{
ParseContext.Initialize(this, out ParseContext ctx);
try
{
ParsingPrimitivesMessages.ReadRawMessage(ref ctx, message);
}
finally
{
ctx.CopyStateTo(this);
}
}
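/// <summary>
/// Re-initializes this instance to read from the given byte-array slice, clearing all parser
/// state so a single CodedInputStream can be reused across parses instead of allocating a new
/// one per call.
/// </summary>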
internal void Reset(byte[] data, int offset, int length, bool discardUnknownFields, ExtensionRegistry registry)
{
var buffer = data;
var bufferPos = offset;
var bufferSize = offset + length;
var leaveOpen = true;
this.buffer = buffer;
this.state = default;
this.state.bufferPos = bufferPos;
this.state.bufferSize = bufferSize;
this.state.sizeLimit = DefaultSizeLimit;
this.state.recursionLimit = DefaultRecursionLimit;
SegmentedBufferHelper.Initialize(this, out this.state.segmentedBufferHelper);
this.leaveOpen = leaveOpen;
this.state.currentLimit = int.MaxValue;
this.DiscardUnknownFields = discardUnknownFields;
this.ExtensionRegistry = registry;
}
#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6bd391df78eafba4a9c8c037ef9deb31
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,285 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
// This part of CodedOutputStream provides all the static entry points that are used
// by generated code and internally to compute the size of messages prior to being
// written to an instance of CodedOutputStream.
public sealed partial class CodedOutputStream
{
private const int LittleEndian64Size = 8;
private const int LittleEndian32Size = 4;
internal const int DoubleSize = LittleEndian64Size;
internal const int FloatSize = LittleEndian32Size;
internal const int BoolSize = 1;
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// double field, including the tag.
/// </summary>
public static int ComputeDoubleSize(double value)
{
return DoubleSize;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// float field, including the tag.
/// </summary>
public static int ComputeFloatSize(float value)
{
return FloatSize;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// uint64 field, including the tag.
/// </summary>
public static int ComputeUInt64Size(ulong value)
{
return ComputeRawVarint64Size(value);
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// int64 field, including the tag.
/// </summary>
public static int ComputeInt64Size(long value)
{
return ComputeRawVarint64Size((ulong) value);
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// int32 field, including the tag.
/// </summary>
public static int ComputeInt32Size(int value)
{
if (value >= 0)
{
return ComputeRawVarint32Size((uint) value);
}
else
{
// Must sign-extend.
return 10;
}
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// fixed64 field, including the tag.
/// </summary>
public static int ComputeFixed64Size(ulong value)
{
return LittleEndian64Size;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// fixed32 field, including the tag.
/// </summary>
public static int ComputeFixed32Size(uint value)
{
return LittleEndian32Size;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// bool field, including the tag.
/// </summary>
public static int ComputeBoolSize(bool value)
{
return BoolSize;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// string field, including the tag.
/// </summary>
public static int ComputeStringSize(String value)
{
int byteArraySize = WritingPrimitives.Utf8Encoding.GetByteCount(value);
return ComputeLengthSize(byteArraySize) + byteArraySize;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// group field, including the tag.
/// </summary>
public static int ComputeGroupSize(IMessage value)
{
return value.CalculateSize();
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// embedded message field, including the tag.
/// </summary>
public static int ComputeMessageSize(IMessage value)
{
int size = value.CalculateSize();
return ComputeLengthSize(size) + size;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// bytes field, including the tag.
/// </summary>
public static int ComputeBytesSize(ByteString value)
{
return ComputeLengthSize(value.Length) + value.Length;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// uint32 field, including the tag.
/// </summary>
public static int ComputeUInt32Size(uint value)
{
return ComputeRawVarint32Size(value);
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a
/// enum field, including the tag. The caller is responsible for
/// converting the enum value to its numeric value.
/// </summary>
public static int ComputeEnumSize(int value)
{
// Currently just a pass-through, but it's nice to separate it logically.
return ComputeInt32Size(value);
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// sfixed32 field, including the tag.
/// </summary>
public static int ComputeSFixed32Size(int value)
{
return LittleEndian32Size;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// sfixed64 field, including the tag.
/// </summary>
public static int ComputeSFixed64Size(long value)
{
return LittleEndian64Size;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// sint32 field, including the tag.
/// </summary>
public static int ComputeSInt32Size(int value)
{
return ComputeRawVarint32Size(WritingPrimitives.EncodeZigZag32(value));
}
/// <summary>
/// Computes the number of bytes that would be needed to encode an
/// sint64 field, including the tag.
/// </summary>
public static int ComputeSInt64Size(long value)
{
return ComputeRawVarint64Size(WritingPrimitives.EncodeZigZag64(value));
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a length,
/// as written by <see cref="WriteLength"/>.
/// </summary>
public static int ComputeLengthSize(int length)
{
return ComputeRawVarint32Size((uint) length);
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a varint.
/// </summary>
public static int ComputeRawVarint32Size(uint value)
{
if ((value & (0xffffffff << 7)) == 0)
{
return 1;
}
if ((value & (0xffffffff << 14)) == 0)
{
return 2;
}
if ((value & (0xffffffff << 21)) == 0)
{
return 3;
}
if ((value & (0xffffffff << 28)) == 0)
{
return 4;
}
return 5;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a varint.
/// </summary>
public static int ComputeRawVarint64Size(ulong value)
{
if ((value & (0xffffffffffffffffL << 7)) == 0)
{
return 1;
}
if ((value & (0xffffffffffffffffL << 14)) == 0)
{
return 2;
}
if ((value & (0xffffffffffffffffL << 21)) == 0)
{
return 3;
}
if ((value & (0xffffffffffffffffL << 28)) == 0)
{
return 4;
}
if ((value & (0xffffffffffffffffL << 35)) == 0)
{
return 5;
}
if ((value & (0xffffffffffffffffL << 42)) == 0)
{
return 6;
}
if ((value & (0xffffffffffffffffL << 49)) == 0)
{
return 7;
}
if ((value & (0xffffffffffffffffL << 56)) == 0)
{
return 8;
}
if ((value & (0xffffffffffffffffL << 63)) == 0)
{
return 9;
}
return 10;
}
/// <summary>
/// Computes the number of bytes that would be needed to encode a tag.
/// </summary>
public static int ComputeTagSize(int fieldNumber)
{
return ComputeRawVarint32Size(WireFormat.MakeTag(fieldNumber, 0));
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8f24c14bfc2490f47bc214729d14cd79
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,609 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.IO;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Encodes and writes protocol message fields.
/// </summary>
/// <remarks>
/// <para>
/// This class is generally used by generated code to write appropriate
/// primitives to the stream. It effectively encapsulates the lowest
/// levels of protocol buffer format. Unlike some other implementations,
/// this does not include combined "write tag and value" methods. Generated
/// code knows the exact byte representations of the tags they're going to write,
/// so there's no need to re-encode them each time. Manually-written code calling
/// this class should just call one of the <c>WriteTag</c> overloads before each value.
/// </para>
/// <para>
/// Repeated fields and map fields are not handled by this class; use <c>RepeatedField&lt;T&gt;</c>
/// and <c>MapField&lt;TKey, TValue&gt;</c> to serialize such fields.
/// </para>
/// </remarks>
[SecuritySafeCritical]
public sealed partial class CodedOutputStream : IDisposable
{
/// <summary>
/// The buffer size used by CreateInstance(Stream).
/// </summary>
public static readonly int DefaultBufferSize = 4096;
private readonly bool leaveOpen;
private readonly byte[] buffer;
private WriterInternalState state;
private readonly Stream output;
#region Construction
/// <summary>
/// Creates a new CodedOutputStream that writes directly to the given
/// byte array. If more bytes are written than fit in the array,
/// OutOfSpaceException will be thrown.
/// </summary>
public CodedOutputStream(byte[] flatArray) : this(flatArray, 0, flatArray.Length)
{
}
/// <summary>
/// Creates a new CodedOutputStream that writes directly to the given
/// byte array slice. If more bytes are written than fit in the array,
/// OutOfSpaceException will be thrown.
/// </summary>
private CodedOutputStream(byte[] buffer, int offset, int length)
{
this.output = null;
this.buffer = ProtoPreconditions.CheckNotNull(buffer, nameof(buffer));
this.state.position = offset;
this.state.limit = offset + length;
WriteBufferHelper.Initialize(this, out this.state.writeBufferHelper);
leaveOpen = true; // Simple way of avoiding trying to dispose of a null reference
}
private CodedOutputStream(Stream output, byte[] buffer, bool leaveOpen)
{
this.output = ProtoPreconditions.CheckNotNull(output, nameof(output));
this.buffer = buffer;
this.state.position = 0;
this.state.limit = buffer.Length;
WriteBufferHelper.Initialize(this, out this.state.writeBufferHelper);
this.leaveOpen = leaveOpen;
}
/// <summary>
/// Creates a new <see cref="CodedOutputStream" /> which write to the given stream, and disposes of that
/// stream when the returned <c>CodedOutputStream</c> is disposed.
/// </summary>
/// <param name="output">The stream to write to. It will be disposed when the returned <c>CodedOutputStream is disposed.</c></param>
public CodedOutputStream(Stream output) : this(output, DefaultBufferSize, false)
{
}
/// <summary>
/// Creates a new CodedOutputStream which writes to the given stream and uses
/// the specified buffer size.
/// </summary>
/// <param name="output">The stream to write to. It will be disposed when the returned <c>CodedOutputStream is disposed.</c></param>
/// <param name="bufferSize">The size of buffer to use internally.</param>
public CodedOutputStream(Stream output, int bufferSize) : this(output, new byte[bufferSize], false)
{
}
/// <summary>
/// Creates a new CodedOutputStream which writes to the given stream.
/// </summary>
/// <param name="output">The stream to write to.</param>
/// <param name="leaveOpen">If <c>true</c>, <paramref name="output"/> is left open when the returned <c>CodedOutputStream</c> is disposed;
/// if <c>false</c>, the provided stream is disposed as well.</param>
public CodedOutputStream(Stream output, bool leaveOpen) : this(output, DefaultBufferSize, leaveOpen)
{
}
/// <summary>
/// Creates a new CodedOutputStream which writes to the given stream and uses
/// the specified buffer size.
/// </summary>
/// <param name="output">The stream to write to.</param>
/// <param name="bufferSize">The size of buffer to use internally.</param>
/// <param name="leaveOpen">If <c>true</c>, <paramref name="output"/> is left open when the returned <c>CodedOutputStream</c> is disposed;
/// if <c>false</c>, the provided stream is disposed as well.</param>
public CodedOutputStream(Stream output, int bufferSize, bool leaveOpen) : this(output, new byte[bufferSize], leaveOpen)
{
}
#endregion
/// <summary>
/// Returns the current position in the stream, or the position in the output buffer
/// </summary>
public long Position
{
get
{
if (output != null)
{
return output.Position + state.position;
}
return state.position;
}
}
/// <summary>
/// Configures whether or not serialization is deterministic.
/// </summary>
/// <remarks>
/// Deterministic serialization guarantees that for a given binary, equal messages (defined by the
/// equals methods in protos) will always be serialized to the same bytes. This implies:
/// <list type="bullet">
/// <item><description>Repeated serialization of a message will return the same bytes.</description></item>
/// <item><description>Different processes of the same binary (which may be executing on different machines)
/// will serialize equal messages to the same bytes.</description></item>
/// </list>
/// Note the deterministic serialization is NOT canonical across languages; it is also unstable
/// across different builds with schema changes due to unknown fields. Users who need canonical
/// serialization, e.g. persistent storage in a canonical form, fingerprinting, etc, should define
/// their own canonicalization specification and implement the serializer using reflection APIs
/// rather than relying on this API.
/// Once set, the serializer will: (Note this is an implementation detail and may be subject to
/// change in the future)
/// <list type="bullet">
/// <item><description>Sort map entries by keys in lexicographical order or numerical order. Note: For string
/// keys, the order is based on comparing the UTF-16 code unit value of each character in the strings.
/// The order may be different from the deterministic serialization in other languages where
/// maps are sorted on the lexicographical order of the UTF8 encoded keys.</description></item>
/// </list>
/// </remarks>
public bool Deterministic { get; set; }
#region Writing of values (not including tags)
/// <summary>
/// Writes a double field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteDouble(double value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteDouble(ref span, ref state, value);
}
/// <summary>
/// Writes a float field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteFloat(float value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteFloat(ref span, ref state, value);
}
/// <summary>
/// Writes a uint64 field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteUInt64(ulong value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteUInt64(ref span, ref state, value);
}
/// <summary>
/// Writes an int64 field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteInt64(long value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteInt64(ref span, ref state, value);
}
/// <summary>
/// Writes an int32 field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteInt32(int value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteInt32(ref span, ref state, value);
}
/// <summary>
/// Writes a fixed64 field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteFixed64(ulong value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteFixed64(ref span, ref state, value);
}
/// <summary>
/// Writes a fixed32 field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteFixed32(uint value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteFixed32(ref span, ref state, value);
}
/// <summary>
/// Writes a bool field value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteBool(bool value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteBool(ref span, ref state, value);
}
/// <summary>
/// Writes a string field value, without a tag, to the stream.
/// The data is length-prefixed.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteString(string value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteString(ref span, ref state, value);
}
/// <summary>
/// Writes a message, without a tag, to the stream.
/// The data is length-prefixed.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteMessage(IMessage value)
{
// TODO: if the message doesn't implement IBufferMessage (and thus does not provide the InternalWriteTo method),
// what we're doing here works fine, but could be more efficient.
// For now, this inefficiency is fine, considering this is only a backward-compatibility scenario (and regenerating the code fixes it).
var span = new Span<byte>(buffer);
WriteContext.Initialize(ref span, ref state, out WriteContext ctx);
try
{
WritingPrimitivesMessages.WriteMessage(ref ctx, value);
}
finally
{
ctx.CopyStateTo(this);
}
}
/// <summary>
/// Writes a message, without a tag, to the stream.
/// Only the message data is written, without a length-delimiter.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteRawMessage(IMessage value)
{
// TODO: if the message doesn't implement IBufferMessage (and thus does not provide the InternalWriteTo method),
// what we're doing here works fine, but could be more efficient.
// For now, this inefficiency is fine, considering this is only a backward-compatibility scenario (and regenerating the code fixes it).
var span = new Span<byte>(buffer);
WriteContext.Initialize(ref span, ref state, out WriteContext ctx);
try
{
WritingPrimitivesMessages.WriteRawMessage(ref ctx, value);
}
finally
{
ctx.CopyStateTo(this);
}
}
/// <summary>
/// Writes a group, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteGroup(IMessage value)
{
var span = new Span<byte>(buffer);
WriteContext.Initialize(ref span, ref state, out WriteContext ctx);
try
{
WritingPrimitivesMessages.WriteGroup(ref ctx, value);
}
finally
{
ctx.CopyStateTo(this);
}
}
/// <summary>
/// Write a byte string, without a tag, to the stream.
/// The data is length-prefixed.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteBytes(ByteString value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteBytes(ref span, ref state, value);
}
/// <summary>
/// Writes a uint32 value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteUInt32(uint value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteUInt32(ref span, ref state, value);
}
/// <summary>
/// Writes an enum value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteEnum(int value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteEnum(ref span, ref state, value);
}
/// <summary>
/// Writes an sfixed32 value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteSFixed32(int value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteSFixed32(ref span, ref state, value);
}
/// <summary>
/// Writes an sfixed64 value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteSFixed64(long value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteSFixed64(ref span, ref state, value);
}
/// <summary>
/// Writes an sint32 value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteSInt32(int value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteSInt32(ref span, ref state, value);
}
/// <summary>
/// Writes an sint64 value, without a tag, to the stream.
/// </summary>
/// <param name="value">The value to write</param>
public void WriteSInt64(long value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteSInt64(ref span, ref state, value);
}
/// <summary>
/// Writes a length (in bytes) for length-delimited data.
/// </summary>
/// <remarks>
/// This method simply writes a raw varint, but exists for clarity in calling code.
/// </remarks>
/// <param name="length">Length value, in bytes.</param>
public void WriteLength(int length)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteLength(ref span, ref state, length);
}
#endregion
#region Raw tag writing
/// <summary>
/// Encodes and writes a tag.
/// </summary>
/// <param name="fieldNumber">The number of the field to write the tag for</param>
/// <param name="type">The wire format type of the tag to write</param>
public void WriteTag(int fieldNumber, WireFormat.WireType type)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteTag(ref span, ref state, fieldNumber, type);
}
/// <summary>
/// Writes an already-encoded tag.
/// </summary>
/// <param name="tag">The encoded tag</param>
public void WriteTag(uint tag)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteTag(ref span, ref state, tag);
}
/// <summary>
/// Writes the given single-byte tag directly to the stream.
/// </summary>
/// <param name="b1">The encoded tag</param>
public void WriteRawTag(byte b1)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawTag(ref span, ref state, b1);
}
/// <summary>
/// Writes the given two-byte tag directly to the stream.
/// </summary>
/// <param name="b1">The first byte of the encoded tag</param>
/// <param name="b2">The second byte of the encoded tag</param>
public void WriteRawTag(byte b1, byte b2)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawTag(ref span, ref state, b1, b2);
}
/// <summary>
/// Writes the given three-byte tag directly to the stream.
/// </summary>
/// <param name="b1">The first byte of the encoded tag</param>
/// <param name="b2">The second byte of the encoded tag</param>
/// <param name="b3">The third byte of the encoded tag</param>
public void WriteRawTag(byte b1, byte b2, byte b3)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawTag(ref span, ref state, b1, b2, b3);
}
/// <summary>
/// Writes the given four-byte tag directly to the stream.
/// </summary>
/// <param name="b1">The first byte of the encoded tag</param>
/// <param name="b2">The second byte of the encoded tag</param>
/// <param name="b3">The third byte of the encoded tag</param>
/// <param name="b4">The fourth byte of the encoded tag</param>
public void WriteRawTag(byte b1, byte b2, byte b3, byte b4)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawTag(ref span, ref state, b1, b2, b3, b4);
}
/// <summary>
/// Writes the given five-byte tag directly to the stream.
/// </summary>
/// <param name="b1">The first byte of the encoded tag</param>
/// <param name="b2">The second byte of the encoded tag</param>
/// <param name="b3">The third byte of the encoded tag</param>
/// <param name="b4">The fourth byte of the encoded tag</param>
/// <param name="b5">The fifth byte of the encoded tag</param>
public void WriteRawTag(byte b1, byte b2, byte b3, byte b4, byte b5)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawTag(ref span, ref state, b1, b2, b3, b4, b5);
}
#endregion
#region Underlying writing primitives
/// <summary>
/// Writes a 32 bit value as a varint. The fast route is taken when
/// there's enough buffer space left to whizz through without checking
/// for each byte; otherwise, we resort to calling WriteRawByte each time.
/// </summary>
internal void WriteRawVarint32(uint value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawVarint32(ref span, ref state, value);
}
internal void WriteRawVarint64(ulong value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawVarint64(ref span, ref state, value);
}
internal void WriteRawLittleEndian32(uint value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawLittleEndian32(ref span, ref state, value);
}
internal void WriteRawLittleEndian64(ulong value)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawLittleEndian64(ref span, ref state, value);
}
/// <summary>
/// Writes out an array of bytes.
/// </summary>
internal void WriteRawBytes(byte[] value)
{
WriteRawBytes(value, 0, value.Length);
}
/// <summary>
/// Writes out part of an array of bytes.
/// </summary>
internal void WriteRawBytes(byte[] value, int offset, int length)
{
var span = new Span<byte>(buffer);
WritingPrimitives.WriteRawBytes(ref span, ref state, value, offset, length);
}
#endregion
/// <summary>
/// Indicates that a CodedOutputStream wrapping a flat byte array
/// ran out of space.
/// </summary>
public sealed class OutOfSpaceException : IOException
{
internal OutOfSpaceException()
: base("CodedOutputStream was writing to a flat byte array and ran out of space.")
{
}
}
/// <summary>
/// Flushes any buffered data and optionally closes the underlying stream, if any.
/// </summary>
/// <remarks>
/// <para>
/// By default, any underlying stream is closed by this method. To configure this behaviour,
/// use a constructor overload with a <c>leaveOpen</c> parameter. If this instance does not
/// have an underlying stream, this method does nothing.
/// </para>
/// <para>
/// For the sake of efficiency, calling this method does not prevent future write calls - but
/// if a later write ends up writing to a stream which has been disposed, that is likely to
/// fail. It is recommended that you not call any other methods after this.
/// </para>
/// </remarks>
public void Dispose()
{
Flush();
if (!leaveOpen)
{
output.Dispose();
}
}
/// <summary>
/// Flushes any buffered data to the underlying stream (if there is one).
/// </summary>
public void Flush()
{
var span = new Span<byte>(buffer);
WriteBufferHelper.Flush(ref span, ref state);
}
/// <summary>
/// Verifies that SpaceLeft returns zero. It's common to create a byte array
/// that is exactly big enough to hold a message, then write to it with
/// a CodedOutputStream. Calling CheckNoSpaceLeft after writing verifies that
/// the message was actually as big as expected, which can help find bugs.
/// </summary>
public void CheckNoSpaceLeft()
{
WriteBufferHelper.CheckNoSpaceLeft(ref state);
}
/// <summary>
/// If writing to a flat array, returns the space left in the array. Otherwise,
/// throws an InvalidOperationException.
/// </summary>
public int SpaceLeft => WriteBufferHelper.GetSpaceLeft(ref state);
internal byte[] InternalBuffer => buffer;
internal Stream InternalOutputStream => output;
internal ref WriterInternalState InternalState => ref state;
}
}
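A minimal usage sketch, not part of this commit, tying the write methods above to the flat-array pattern described for CheckNoSpaceLeft: the buffer is sized exactly up front, written with tag/value calls, and then verified. The Compute*Size helpers are assumed from the wider CodedOutputStream API, which is not shown in this excerpt.
    // Sketch only; assumes: using Google.Protobuf;
    // ComputeTagSize/ComputeStringSize/ComputeBoolSize belong to the existing
    // CodedOutputStream API outside this excerpt.
    byte[] buffer = new byte[
        CodedOutputStream.ComputeTagSize(1) + CodedOutputStream.ComputeStringSize("hello") +
        CodedOutputStream.ComputeTagSize(2) + CodedOutputStream.ComputeBoolSize(true)];
    var output = new CodedOutputStream(buffer);
    output.WriteTag(1, WireFormat.WireType.LengthDelimited);
    output.WriteString("hello");
    output.WriteTag(2, WireFormat.WireType.Varint);
    output.WriteBool(true);
    output.Flush();
    output.CheckNoSpaceLeft();   // throws if SpaceLeft != 0, i.e. the sizing above was wrong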

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 79ad02c5589e31e49a8c14beb07d8e01
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 011bc81dfcaad4e4f93d75625e1fb321
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,65 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2017 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System.Collections.Generic;
namespace Google.Protobuf.Collections
{
/// <summary>
/// Utility methods to check whether two Lists are equal, and to compute
/// the hash code of a List.
/// </summary>
public static class Lists
{
/// <summary>
/// Checks if two lists are equal.
/// </summary>
public static bool Equals<T>(List<T> left, List<T> right)
{
if (left == right)
{
return true;
}
if (left == null || right == null)
{
return false;
}
if (left.Count != right.Count)
{
return false;
}
IEqualityComparer<T> comparer = EqualityComparer<T>.Default;
for (int i = 0; i < left.Count; i++)
{
if (!comparer.Equals(left[i], right[i]))
{
return false;
}
}
return true;
}
/// <summary>
/// Gets the list's hash code.
/// </summary>
public static int GetHashCode<T>(List<T> list)
{
if (list == null)
{
return 0;
}
int hash = 31;
foreach (T element in list)
{
hash = hash * 29 + element.GetHashCode();
}
return hash;
}
}
}
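As a quick illustration, not part of the commit, the helpers above give order-sensitive, element-wise semantics and treat a null list as hash code 0 (assuming the usual usings for Google.Protobuf.Collections and System.Collections.Generic):
    var a = new List<int> { 1, 2, 3 };
    var b = new List<int> { 1, 2, 3 };
    bool equal = Lists.Equals(a, b);                             // true: same count, element-wise equal
    bool reordered = Lists.Equals(a, new List<int> { 3, 2, 1 }); // false: order matters
    int hash = Lists.GetHashCode(a);                             // order-dependent hash
    int nullHash = Lists.GetHashCode<int>(null);                 // 0 for a null list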

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 59a79b6bbb50cdd4f8af299a4f934b58
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,737 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Compatibility;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security;
namespace Google.Protobuf.Collections
{
/// <summary>
/// Representation of a map field in a Protocol Buffer message.
/// </summary>
/// <typeparam name="TKey">Key type in the map. Must be a type supported by Protocol Buffer map keys.</typeparam>
/// <typeparam name="TValue">Value type in the map. Must be a type supported by Protocol Buffers.</typeparam>
/// <remarks>
/// <para>
/// For string keys, the equality comparison is provided by <see cref="StringComparer.Ordinal" />.
/// </para>
/// <para>
/// Null values are not permitted in the map, either for wrapper types or regular messages.
/// If a map is deserialized from a data stream and the value is missing from an entry, a default value
/// is created instead. For primitive types, that is the regular default value (0, the empty string and so
/// on); for message types, an empty instance of the message is created, as if the map entry contained a 0-length
/// encoded value for the field.
/// </para>
/// <para>
/// This implementation does not generally prohibit the use of key/value types which are not
/// supported by Protocol Buffers (e.g. using a key type of <code>byte</code>) but nor does it guarantee
/// that all operations will work in such cases.
/// </para>
/// <para>
/// The order in which entries are returned when iterating over this object is undefined, and may change
/// in future versions.
/// </para>
/// </remarks>
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(MapField<,>.MapFieldDebugView))]
public sealed class MapField<TKey, TValue> : IDeepCloneable<MapField<TKey, TValue>>, IDictionary<TKey, TValue>, IEquatable<MapField<TKey, TValue>>, IDictionary, IReadOnlyDictionary<TKey, TValue>
{
private static readonly EqualityComparer<TValue> ValueEqualityComparer = ProtobufEqualityComparers.GetEqualityComparer<TValue>();
private static readonly EqualityComparer<TKey> KeyEqualityComparer = ProtobufEqualityComparers.GetEqualityComparer<TKey>();
// TODO: Don't create the map/list until we have an entry. (Assume many maps will be empty.)
private readonly Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>> map = new(KeyEqualityComparer);
private readonly LinkedList<KeyValuePair<TKey, TValue>> list = new();
/// <summary>
/// Creates a deep clone of this object.
/// </summary>
/// <returns>
/// A deep clone of this object.
/// </returns>
public MapField<TKey, TValue> Clone()
{
var clone = new MapField<TKey, TValue>();
// Keys are never cloneable. Values might be.
if (typeof(IDeepCloneable<TValue>).IsAssignableFrom(typeof(TValue)))
{
foreach (var pair in list)
{
clone.Add(pair.Key, ((IDeepCloneable<TValue>)pair.Value).Clone());
}
}
else
{
// Nothing is cloneable, so we don't need to worry.
clone.Add(this);
}
return clone;
}
/// <summary>
/// Adds the specified key/value pair to the map.
/// </summary>
/// <remarks>
/// This operation fails if the key already exists in the map. To replace an existing entry, use the indexer.
/// </remarks>
/// <param name="key">The key to add</param>
/// <param name="value">The value to add.</param>
/// <exception cref="System.ArgumentException">The given key already exists in map.</exception>
public void Add(TKey key, TValue value)
{
// Validation of arguments happens in ContainsKey and the indexer
if (ContainsKey(key))
{
throw new ArgumentException("Key already exists in map", nameof(key));
}
this[key] = value;
}
/// <summary>
/// Determines whether the specified key is present in the map.
/// </summary>
/// <param name="key">The key to check.</param>
/// <returns><c>true</c> if the map contains the given key; <c>false</c> otherwise.</returns>
public bool ContainsKey(TKey key)
{
ProtoPreconditions.CheckNotNullUnconstrained(key, nameof(key));
return map.ContainsKey(key);
}
private bool ContainsValue(TValue value) =>
list.Any(pair => ValueEqualityComparer.Equals(pair.Value, value));
/// <summary>
/// Removes the entry identified by the given key from the map.
/// </summary>
/// <param name="key">The key indicating the entry to remove from the map.</param>
/// <returns><c>true</c> if the map contained the given key before the entry was removed; <c>false</c> otherwise.</returns>
public bool Remove(TKey key)
{
ProtoPreconditions.CheckNotNullUnconstrained(key, nameof(key));
if (map.TryGetValue(key, out LinkedListNode<KeyValuePair<TKey, TValue>> node))
{
map.Remove(key);
node.List.Remove(node);
return true;
}
else
{
return false;
}
}
/// <summary>
/// Gets the value associated with the specified key.
/// </summary>
/// <param name="key">The key whose value to get.</param>
/// <param name="value">When this method returns, the value associated with the specified key, if the key is found;
/// otherwise, the default value for the type of the <paramref name="value"/> parameter.
/// This parameter is passed uninitialized.</param>
/// <returns><c>true</c> if the map contains an element with the specified key; otherwise, <c>false</c>.</returns>
public bool TryGetValue(TKey key, out TValue value)
{
if (map.TryGetValue(key, out LinkedListNode<KeyValuePair<TKey, TValue>> node))
{
value = node.Value.Value;
return true;
}
else
{
value = default;
return false;
}
}
/// <summary>
/// Gets or sets the value associated with the specified key.
/// </summary>
/// <param name="key">The key of the value to get or set.</param>
/// <exception cref="KeyNotFoundException">The property is retrieved and key does not exist in the collection.</exception>
/// <returns>The value associated with the specified key. If the specified key is not found,
/// a get operation throws a <see cref="KeyNotFoundException"/>, and a set operation creates a new element with the specified key.</returns>
public TValue this[TKey key]
{
get
{
ProtoPreconditions.CheckNotNullUnconstrained(key, nameof(key));
if (TryGetValue(key, out TValue value))
{
return value;
}
throw new KeyNotFoundException();
}
set
{
ProtoPreconditions.CheckNotNullUnconstrained(key, nameof(key));
// value == null check here is redundant, but avoids boxing.
if (value == null)
{
ProtoPreconditions.CheckNotNullUnconstrained(value, nameof(value));
}
var pair = new KeyValuePair<TKey, TValue>(key, value);
if (map.TryGetValue(key, out LinkedListNode<KeyValuePair<TKey, TValue>> node))
{
node.Value = pair;
}
else
{
node = list.AddLast(pair);
map[key] = node;
}
}
}
/// <summary>
/// Gets a collection containing the keys in the map.
/// </summary>
public ICollection<TKey> Keys => new MapView<TKey>(this, pair => pair.Key, ContainsKey);
/// <summary>
/// Gets a collection containing the values in the map.
/// </summary>
public ICollection<TValue> Values => new MapView<TValue>(this, pair => pair.Value, ContainsValue);
/// <summary>
/// Adds the specified entries to the map. The keys and values are not automatically cloned.
/// </summary>
/// <param name="entries">The entries to add to the map.</param>
public void Add(IDictionary<TKey, TValue> entries)
{
ProtoPreconditions.CheckNotNull(entries, nameof(entries));
foreach (var pair in entries)
{
Add(pair.Key, pair.Value);
}
}
/// <summary>
/// Adds the specified entries to the map, replacing any existing entries with the same keys.
/// The keys and values are not automatically cloned.
/// </summary>
/// <remarks>This method primarily exists to be called from MergeFrom methods in generated classes for messages.</remarks>
/// <param name="entries">The entries to add to the map.</param>
public void MergeFrom(IDictionary<TKey, TValue> entries)
{
ProtoPreconditions.CheckNotNull(entries, nameof(entries));
foreach (var pair in entries)
{
this[pair.Key] = pair.Value;
}
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// An enumerator that can be used to iterate through the collection.
/// </returns>
public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator() => list.GetEnumerator();
/// <summary>
/// Returns an enumerator that iterates through a collection.
/// </summary>
/// <returns>
/// An <see cref="T:System.Collections.IEnumerator" /> object that can be used to iterate through the collection.
/// </returns>
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
/// <summary>
/// Adds the specified item to the map.
/// </summary>
/// <param name="item">The item to add to the map.</param>
void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item) => Add(item.Key, item.Value);
/// <summary>
/// Removes all items from the map.
/// </summary>
public void Clear()
{
list.Clear();
map.Clear();
}
/// <summary>
/// Determines whether the map contains an entry equivalent to the given key/value pair.
/// </summary>
/// <param name="item">The key/value pair to find.</param>
/// <returns></returns>
bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> item) =>
TryGetValue(item.Key, out TValue value) && ValueEqualityComparer.Equals(item.Value, value);
/// <summary>
/// Copies the key/value pairs in this map to an array.
/// </summary>
/// <param name="array">The array to copy the entries into.</param>
/// <param name="arrayIndex">The index of the array at which to start copying values.</param>
void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex) =>
list.CopyTo(array, arrayIndex);
/// <summary>
/// Removes the specified key/value pair from the map.
/// </summary>
/// <remarks>Both the key and the value must be found for the entry to be removed.</remarks>
/// <param name="item">The key/value pair to remove.</param>
/// <returns><c>true</c> if the key/value pair was found and removed; <c>false</c> otherwise.</returns>
bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item)
{
if (item.Key == null)
{
throw new ArgumentException("Key is null", nameof(item));
}
if (map.TryGetValue(item.Key, out LinkedListNode<KeyValuePair<TKey, TValue>> node) &&
EqualityComparer<TValue>.Default.Equals(item.Value, node.Value.Value))
{
map.Remove(item.Key);
node.List.Remove(node);
return true;
}
else
{
return false;
}
}
/// <summary>
/// Gets the number of elements contained in the map.
/// </summary>
public int Count => list.Count;
/// <summary>
/// Gets a value indicating whether the map is read-only.
/// </summary>
public bool IsReadOnly => false;
/// <summary>
/// Determines whether the specified <see cref="System.Object" />, is equal to this instance.
/// </summary>
/// <param name="other">The <see cref="System.Object" /> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="System.Object" /> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object other) => Equals(other as MapField<TKey, TValue>);
/// <summary>
/// Returns a hash code for this instance.
/// </summary>
/// <returns>
/// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.
/// </returns>
public override int GetHashCode()
{
var keyComparer = KeyEqualityComparer;
var valueComparer = ValueEqualityComparer;
int hash = 0;
foreach (var pair in list)
{
hash ^= keyComparer.GetHashCode(pair.Key) * 31 + valueComparer.GetHashCode(pair.Value);
}
return hash;
}
/// <summary>
/// Compares this map with another for equality.
/// </summary>
/// <remarks>
/// The order of the key/value pairs in the maps is not deemed significant in this comparison.
/// </remarks>
/// <param name="other">The map to compare this with.</param>
/// <returns><c>true</c> if <paramref name="other"/> refers to an equal map; <c>false</c> otherwise.</returns>
public bool Equals(MapField<TKey, TValue> other)
{
if (other == null)
{
return false;
}
if (other == this)
{
return true;
}
if (other.Count != this.Count)
{
return false;
}
var valueComparer = ValueEqualityComparer;
foreach (var pair in this)
{
if (!other.TryGetValue(pair.Key, out TValue value))
{
return false;
}
if (!valueComparer.Equals(value, pair.Value))
{
return false;
}
}
return true;
}
/// <summary>
/// Adds entries to the map from the given stream.
/// </summary>
/// <remarks>
/// It is assumed that the stream is initially positioned after the tag specified by the codec.
/// This method will continue reading entries from the stream until the end is reached, or
/// a different tag is encountered.
/// </remarks>
/// <param name="input">Stream to read from</param>
/// <param name="codec">Codec describing how the key/value pairs are encoded</param>
public void AddEntriesFrom(CodedInputStream input, Codec codec)
{
ParseContext.Initialize(input, out ParseContext ctx);
try
{
AddEntriesFrom(ref ctx, codec);
}
finally
{
ctx.CopyStateTo(input);
}
}
/// <summary>
/// Adds entries to the map from the given parse context.
/// </summary>
/// <remarks>
/// It is assumed that the input is initially positioned after the tag specified by the codec.
/// This method will continue reading entries from the input until the end is reached, or
/// a different tag is encountered.
/// </remarks>
/// <param name="ctx">Input to read from</param>
/// <param name="codec">Codec describing how the key/value pairs are encoded</param>
[SecuritySafeCritical]
public void AddEntriesFrom(ref ParseContext ctx, Codec codec)
{
do
{
KeyValuePair<TKey, TValue> entry = ParsingPrimitivesMessages.ReadMapEntry(ref ctx, codec);
this[entry.Key] = entry.Value;
} while (ParsingPrimitives.MaybeConsumeTag(ref ctx.buffer, ref ctx.state, codec.MapTag));
}
/// <summary>
/// Writes the contents of this map to the given coded output stream, using the specified codec
/// to encode each entry.
/// </summary>
/// <param name="output">The output stream to write to.</param>
/// <param name="codec">The codec to use for each entry.</param>
public void WriteTo(CodedOutputStream output, Codec codec)
{
WriteContext.Initialize(output, out WriteContext ctx);
try
{
IEnumerable<KeyValuePair<TKey, TValue>> listToWrite = list;
if (output.Deterministic)
{
listToWrite = GetSortedListCopy(list);
}
WriteTo(ref ctx, codec, listToWrite);
}
finally
{
ctx.CopyStateTo(output);
}
}
internal IEnumerable<KeyValuePair<TKey, TValue>> GetSortedListCopy(IEnumerable<KeyValuePair<TKey, TValue>> listToSort)
{
// We can't sort the list in place, as that would invalidate the linked list.
// Instead, we create a new list, sort that, and then write it out.
var listToWrite = new List<KeyValuePair<TKey, TValue>>(listToSort);
listToWrite.Sort((pair1, pair2) =>
{
if (typeof(TKey) == typeof(string))
{
// Use Ordinal, otherwise Comparer<string>.Default uses StringComparer.CurrentCulture
return StringComparer.Ordinal.Compare(pair1.Key.ToString(), pair2.Key.ToString());
}
return Comparer<TKey>.Default.Compare(pair1.Key, pair2.Key);
});
return listToWrite;
}
/// <summary>
/// Writes the contents of this map to the given write context, using the specified codec
/// to encode each entry.
/// </summary>
/// <param name="ctx">The write context to write to.</param>
/// <param name="codec">The codec to use for each entry.</param>
[SecuritySafeCritical]
public void WriteTo(ref WriteContext ctx, Codec codec)
{
IEnumerable<KeyValuePair<TKey, TValue>> listToWrite = list;
if (ctx.state.CodedOutputStream?.Deterministic ?? false)
{
listToWrite = GetSortedListCopy(list);
}
WriteTo(ref ctx, codec, listToWrite);
}
[SecuritySafeCritical]
private void WriteTo(ref WriteContext ctx, Codec codec, IEnumerable<KeyValuePair<TKey, TValue>> listKvp)
{
foreach (var entry in listKvp)
{
ctx.WriteTag(codec.MapTag);
WritingPrimitives.WriteLength(ref ctx.buffer, ref ctx.state, CalculateEntrySize(codec, entry));
codec.KeyCodec.WriteTagAndValue(ref ctx, entry.Key);
codec.ValueCodec.WriteTagAndValue(ref ctx, entry.Value);
}
}
/// <summary>
/// Calculates the size of this map based on the given entry codec.
/// </summary>
/// <param name="codec">The codec to use to encode each entry.</param>
/// <returns></returns>
public int CalculateSize(Codec codec)
{
if (Count == 0)
{
return 0;
}
int size = 0;
foreach (var entry in list)
{
int entrySize = CalculateEntrySize(codec, entry);
size += CodedOutputStream.ComputeRawVarint32Size(codec.MapTag);
size += CodedOutputStream.ComputeLengthSize(entrySize) + entrySize;
}
return size;
}
private static int CalculateEntrySize(Codec codec, KeyValuePair<TKey, TValue> entry)
{
return codec.KeyCodec.CalculateSizeWithTag(entry.Key) + codec.ValueCodec.CalculateSizeWithTag(entry.Value);
}
/// <summary>
/// Returns a string representation of this repeated field, in the same
/// way as it would be represented by the default JSON formatter.
/// </summary>
public override string ToString()
{
var writer = new StringWriter();
JsonFormatter.Default.WriteDictionary(writer, this);
return writer.ToString();
}
#region IDictionary explicit interface implementation
void IDictionary.Add(object key, object value) => Add((TKey)key, (TValue)value);
bool IDictionary.Contains(object key) => key is TKey k && ContainsKey(k);
IDictionaryEnumerator IDictionary.GetEnumerator() => new DictionaryEnumerator(GetEnumerator());
void IDictionary.Remove(object key)
{
ProtoPreconditions.CheckNotNull(key, nameof(key));
if (key is TKey k)
{
Remove(k);
}
}
void ICollection.CopyTo(Array array, int index)
{
// This is ugly and slow as heck, but with any luck it will never be used anyway.
ICollection temp = this.Select(pair => new DictionaryEntry(pair.Key, pair.Value)).ToList();
temp.CopyTo(array, index);
}
bool IDictionary.IsFixedSize => false;
ICollection IDictionary.Keys => (ICollection)Keys;
ICollection IDictionary.Values => (ICollection)Values;
bool ICollection.IsSynchronized => false;
object ICollection.SyncRoot => this;
object IDictionary.this[object key]
{
get
{
ProtoPreconditions.CheckNotNull(key, nameof(key));
if (key is TKey k)
{
TryGetValue(k, out TValue value);
return value;
}
return null;
}
set
{
this[(TKey)key] = (TValue)value;
}
}
#endregion
#region IReadOnlyDictionary explicit interface implementation
IEnumerable<TKey> IReadOnlyDictionary<TKey, TValue>.Keys => Keys;
IEnumerable<TValue> IReadOnlyDictionary<TKey, TValue>.Values => Values;
#endregion
private class DictionaryEnumerator : IDictionaryEnumerator
{
private readonly IEnumerator<KeyValuePair<TKey, TValue>> enumerator;
internal DictionaryEnumerator(IEnumerator<KeyValuePair<TKey, TValue>> enumerator)
{
this.enumerator = enumerator;
}
public bool MoveNext() => enumerator.MoveNext();
public void Reset() => enumerator.Reset();
public object Current => Entry;
public DictionaryEntry Entry => new DictionaryEntry(Key, Value);
public object Key => enumerator.Current.Key;
public object Value => enumerator.Current.Value;
}
/// <summary>
/// A codec for a specific map field. This contains all the information required to encode and
/// decode the nested messages.
/// </summary>
public sealed class Codec
{
private readonly FieldCodec<TKey> keyCodec;
private readonly FieldCodec<TValue> valueCodec;
private readonly uint mapTag;
/// <summary>
/// Creates a new entry codec based on a separate key codec and value codec,
/// and the tag to use for each map entry.
/// </summary>
/// <param name="keyCodec">The key codec.</param>
/// <param name="valueCodec">The value codec.</param>
/// <param name="mapTag">The map tag to use to introduce each map entry.</param>
public Codec(FieldCodec<TKey> keyCodec, FieldCodec<TValue> valueCodec, uint mapTag)
{
this.keyCodec = keyCodec;
this.valueCodec = valueCodec;
this.mapTag = mapTag;
}
/// <summary>
/// The key codec.
/// </summary>
internal FieldCodec<TKey> KeyCodec => keyCodec;
/// <summary>
/// The value codec.
/// </summary>
internal FieldCodec<TValue> ValueCodec => valueCodec;
/// <summary>
/// The tag used in the enclosing message to indicate map entries.
/// </summary>
internal uint MapTag => mapTag;
}
private class MapView<T> : ICollection<T>, ICollection
{
private readonly MapField<TKey, TValue> parent;
private readonly Func<KeyValuePair<TKey, TValue>, T> projection;
private readonly Func<T, bool> containsCheck;
internal MapView(
MapField<TKey, TValue> parent,
Func<KeyValuePair<TKey, TValue>, T> projection,
Func<T, bool> containsCheck)
{
this.parent = parent;
this.projection = projection;
this.containsCheck = containsCheck;
}
public int Count => parent.Count;
public bool IsReadOnly => true;
public bool IsSynchronized => false;
public object SyncRoot => parent;
public void Add(T item) => throw new NotSupportedException();
public void Clear() => throw new NotSupportedException();
public bool Contains(T item) => containsCheck(item);
public void CopyTo(T[] array, int arrayIndex)
{
if (arrayIndex < 0)
{
throw new ArgumentOutOfRangeException(nameof(arrayIndex));
}
if (arrayIndex + Count > array.Length)
{
throw new ArgumentException("Not enough space in the array", nameof(array));
}
foreach (var item in this)
{
array[arrayIndex++] = item;
}
}
public IEnumerator<T> GetEnumerator()
{
return parent.list.Select(projection).GetEnumerator();
}
public bool Remove(T item) => throw new NotSupportedException();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
public void CopyTo(Array array, int index)
{
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
if (index + Count > array.Length)
{
throw new ArgumentException("Not enough space in the array", nameof(array));
}
foreach (var item in this)
{
array.SetValue(item, index++);
}
}
}
private sealed class MapFieldDebugView
{
private readonly MapField<TKey, TValue> map;
public MapFieldDebugView(MapField<TKey, TValue> map)
{
this.map = map;
}
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public KeyValuePair<TKey, TValue>[] Items => map.list.ToArray();
}
}
}
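A short sketch, not part of the commit, of the add/replace/lookup semantics documented above: Add rejects duplicate keys, the indexer replaces, and a get for a missing key throws.
    // Sketch only; assumes: using Google.Protobuf.Collections;
    var map = new MapField<string, int>();
    map.Add("a", 1);                                   // Add throws ArgumentException for an existing key
    map["a"] = 2;                                      // the indexer replaces silently (as MergeFrom does)
    map["b"] = 3;
    bool found = map.TryGetValue("a", out int value);  // true, value == 2
    bool removed = map.Remove("b");                    // true; removing it again returns false
    // map["zzz"]                                      // a get for a missing key throws KeyNotFoundException
    // Iteration currently follows insertion order, but the order is documented as undefined.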

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2d17e06f635e50745bbffabc4e671f0c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,107 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2017 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections.Generic;
namespace Google.Protobuf.Collections
{
/// <summary>
/// Provides a central place to implement equality comparisons, primarily for bitwise float/double equality.
/// </summary>
public static class ProtobufEqualityComparers
{
/// <summary>
/// Returns an equality comparer for <typeparamref name="T"/> suitable for Protobuf equality comparisons.
/// This is usually just the default equality comparer for the type, but floating point numbers are compared
/// bitwise.
/// </summary>
/// <typeparam name="T">The type of equality comparer to return.</typeparam>
/// <returns>The equality comparer.</returns>
public static EqualityComparer<T> GetEqualityComparer<T>()
{
return typeof(T) == typeof(double) ? (EqualityComparer<T>) (object) BitwiseDoubleEqualityComparer
: typeof(T) == typeof(float) ? (EqualityComparer<T>) (object) BitwiseSingleEqualityComparer
: typeof(T) == typeof(double?) ? (EqualityComparer<T>) (object) BitwiseNullableDoubleEqualityComparer
: typeof(T) == typeof(float?) ? (EqualityComparer<T>) (object) BitwiseNullableSingleEqualityComparer
: EqualityComparer<T>.Default;
}
/// <summary>
/// Returns an equality comparer suitable for comparing 64-bit floating point values, by bitwise comparison.
/// (NaN values are considered equal, but only when they have the same representation.)
/// </summary>
public static EqualityComparer<double> BitwiseDoubleEqualityComparer { get; } = new BitwiseDoubleEqualityComparerImpl();
/// <summary>
/// Returns an equality comparer suitable for comparing 32-bit floating point values, by bitwise comparison.
/// (NaN values are considered equal, but only when they have the same representation.)
/// </summary>
public static EqualityComparer<float> BitwiseSingleEqualityComparer { get; } = new BitwiseSingleEqualityComparerImpl();
/// <summary>
/// Returns an equality comparer suitable for comparing nullable 64-bit floating point values, by bitwise comparison.
/// (NaN values are considered equal, but only when they have the same representation.)
/// </summary>
public static EqualityComparer<double?> BitwiseNullableDoubleEqualityComparer { get; } = new BitwiseNullableDoubleEqualityComparerImpl();
/// <summary>
/// Returns an equality comparer suitable for comparing nullable 32-bit floating point values, by bitwise comparison.
/// (NaN values are considered equal, but only when they have the same representation.)
/// </summary>
public static EqualityComparer<float?> BitwiseNullableSingleEqualityComparer { get; } = new BitwiseNullableSingleEqualityComparerImpl();
private class BitwiseDoubleEqualityComparerImpl : EqualityComparer<double>
{
public override bool Equals(double x, double y) =>
BitConverter.DoubleToInt64Bits(x) == BitConverter.DoubleToInt64Bits(y);
public override int GetHashCode(double obj) =>
BitConverter.DoubleToInt64Bits(obj).GetHashCode();
}
private class BitwiseSingleEqualityComparerImpl : EqualityComparer<float>
{
// Just promote values to double and use BitConverter.DoubleToInt64Bits,
// as there's no BitConverter.SingleToInt32Bits, unfortunately.
public override bool Equals(float x, float y) =>
BitConverter.DoubleToInt64Bits(x) == BitConverter.DoubleToInt64Bits(y);
public override int GetHashCode(float obj) =>
BitConverter.DoubleToInt64Bits(obj).GetHashCode();
}
private class BitwiseNullableDoubleEqualityComparerImpl : EqualityComparer<double?>
{
public override bool Equals(double? x, double? y) =>
x == null && y == null ? true
: x == null || y == null ? false
: BitwiseDoubleEqualityComparer.Equals(x.Value, y.Value);
// The hash code for null is just a constant which is at least *unlikely* to be used
// elsewhere. (Compared with 0, say.)
public override int GetHashCode(double? obj) =>
obj == null ? 293864 : BitwiseDoubleEqualityComparer.GetHashCode(obj.Value);
}
private class BitwiseNullableSingleEqualityComparerImpl : EqualityComparer<float?>
{
public override bool Equals(float? x, float? y) =>
x == null && y == null ? true
: x == null || y == null ? false
: BitwiseSingleEqualityComparer.Equals(x.Value, y.Value);
// The hash code for null is just a constant which is at least *unlikely* to be used
// elsewhere. (Compared with 0, say.)
public override int GetHashCode(float? obj) =>
obj == null ? 293864 : BitwiseSingleEqualityComparer.GetHashCode(obj.Value);
}
}
}
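For illustration only, the bitwise comparers above differ from the default comparer for signed zero, while NaN compares equal to itself whenever the bit patterns match (assumes using Google.Protobuf.Collections and System.Collections.Generic):
    var bitwise = ProtobufEqualityComparers.BitwiseDoubleEqualityComparer;
    bool nanEqual = bitwise.Equals(double.NaN, double.NaN);                 // true: identical bit patterns
    bool zeroEqual = bitwise.Equals(0.0, -0.0);                             // false: +0.0 and -0.0 differ bitwise
    bool defaultZero = EqualityComparer<double>.Default.Equals(0.0, -0.0);  // true, for contrast
    var nullableCmp = ProtobufEqualityComparers.GetEqualityComparer<float?>(); // nullable-aware bitwise comparer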

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fe847f0b8351a874186d0dead84866dd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,663 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security;
namespace Google.Protobuf.Collections
{
/// <summary>
/// The contents of a repeated field: essentially, a collection with some extra
/// restrictions (no null values) and capabilities (deep cloning).
/// </summary>
/// <remarks>
/// This implementation does not generally prohibit the use of types which are not
/// supported by Protocol Buffers but nor does it guarantee that all operations will work in such cases.
/// </remarks>
/// <typeparam name="T">The element type of the repeated field.</typeparam>
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(RepeatedField<>.RepeatedFieldDebugView))]
public sealed class RepeatedField<T> : IList<T>, IList, IDeepCloneable<RepeatedField<T>>, IEquatable<RepeatedField<T>>, IReadOnlyList<T>
{
private static readonly EqualityComparer<T> EqualityComparer = ProtobufEqualityComparers.GetEqualityComparer<T>();
private static readonly T[] EmptyArray = new T[0];
private const int MinArraySize = 8;
private T[] array = EmptyArray;
private int count = 0;
/// <summary>
/// Creates a deep clone of this repeated field.
/// </summary>
/// <remarks>
/// If the field type is
/// a message type, each element is also cloned; otherwise, it is
/// assumed that the field type is primitive (including string and
/// bytes, both of which are immutable) and so a simple copy is
/// equivalent to a deep clone.
/// </remarks>
/// <returns>A deep clone of this repeated field.</returns>
public RepeatedField<T> Clone()
{
RepeatedField<T> clone = new RepeatedField<T>();
if (array != EmptyArray)
{
clone.array = (T[])array.Clone();
if (clone.array is IDeepCloneable<T>[] cloneableArray)
{
for (int i = 0; i < count; i++)
{
clone.array[i] = cloneableArray[i].Clone();
}
}
}
clone.count = count;
return clone;
}
/// <summary>
/// Adds the entries from the given input stream, decoding them with the specified codec.
/// </summary>
/// <param name="input">The input stream to read from.</param>
/// <param name="codec">The codec to use in order to read each entry.</param>
public void AddEntriesFrom(CodedInputStream input, FieldCodec<T> codec)
{
ParseContext.Initialize(input, out ParseContext ctx);
try
{
AddEntriesFrom(ref ctx, codec);
}
finally
{
ctx.CopyStateTo(input);
}
}
/// <summary>
/// Adds the entries from the given parse context, decoding them with the specified codec.
/// </summary>
/// <param name="ctx">The input to read from.</param>
/// <param name="codec">The codec to use in order to read each entry.</param>
[SecuritySafeCritical]
public void AddEntriesFrom(ref ParseContext ctx, FieldCodec<T> codec)
{
// TODO: Inline some of the Add code, so we can avoid checking the size on every
// iteration.
uint tag = ctx.state.lastTag;
var reader = codec.ValueReader;
// Non-nullable value types can be packed or not.
if (FieldCodec<T>.IsPackedRepeatedField(tag))
{
int length = ctx.ReadLength();
if (length > 0)
{
int oldLimit = SegmentedBufferHelper.PushLimit(ref ctx.state, length);
// If the content is fixed size then we can calculate the length
// of the repeated field and pre-initialize the underlying collection.
//
// Check that the supplied length doesn't exceed the underlying buffer.
// That prevents a malicious length from initializing a very large collection.
if (codec.FixedSize > 0 && length % codec.FixedSize == 0 && ParsingPrimitives.IsDataAvailable(ref ctx.state, length))
{
EnsureSize(count + (length / codec.FixedSize));
while (!SegmentedBufferHelper.IsReachedLimit(ref ctx.state))
{
// Only FieldCodecs with a fixed size can reach here, and they are all known
// types that don't allow the user to specify a custom reader action.
// reader action will never return null.
array[count++] = reader(ref ctx);
}
}
else
{
// Content is variable size so add until we reach the limit.
while (!SegmentedBufferHelper.IsReachedLimit(ref ctx.state))
{
Add(reader(ref ctx));
}
}
SegmentedBufferHelper.PopLimit(ref ctx.state, oldLimit);
}
// Empty packed field. Odd, but valid - just ignore.
}
else
{
// Not packed... (possibly not packable)
do
{
Add(reader(ref ctx));
} while (ParsingPrimitives.MaybeConsumeTag(ref ctx.buffer, ref ctx.state, tag));
}
}
/// <summary>
/// Calculates the size of this collection based on the given codec.
/// </summary>
/// <param name="codec">The codec to use when encoding each field.</param>
/// <returns>The number of bytes that would be written to an output by one of the <c>WriteTo</c> methods,
/// using the same codec.</returns>
public int CalculateSize(FieldCodec<T> codec)
{
if (count == 0)
{
return 0;
}
uint tag = codec.Tag;
if (codec.PackedRepeatedField)
{
int dataSize = CalculatePackedDataSize(codec);
return CodedOutputStream.ComputeRawVarint32Size(tag) +
CodedOutputStream.ComputeLengthSize(dataSize) +
dataSize;
}
else
{
var sizeCalculator = codec.ValueSizeCalculator;
int size = count * CodedOutputStream.ComputeRawVarint32Size(tag);
if (codec.EndTag != 0)
{
size += count * CodedOutputStream.ComputeRawVarint32Size(codec.EndTag);
}
for (int i = 0; i < count; i++)
{
size += sizeCalculator(array[i]);
}
return size;
}
}
private int CalculatePackedDataSize(FieldCodec<T> codec)
{
int fixedSize = codec.FixedSize;
if (fixedSize == 0)
{
var calculator = codec.ValueSizeCalculator;
int tmp = 0;
for (int i = 0; i < count; i++)
{
tmp += calculator(array[i]);
}
return tmp;
}
else
{
return fixedSize * Count;
}
}
/// <summary>
/// Writes the contents of this collection to the given <see cref="CodedOutputStream"/>,
/// encoding each value using the specified codec.
/// </summary>
/// <param name="output">The output stream to write to.</param>
/// <param name="codec">The codec to use when encoding each value.</param>
public void WriteTo(CodedOutputStream output, FieldCodec<T> codec)
{
WriteContext.Initialize(output, out WriteContext ctx);
try
{
WriteTo(ref ctx, codec);
}
finally
{
ctx.CopyStateTo(output);
}
}
/// <summary>
/// Writes the contents of this collection to the given write context,
/// encoding each value using the specified codec.
/// </summary>
/// <param name="ctx">The write context to write to.</param>
/// <param name="codec">The codec to use when encoding each value.</param>
[SecuritySafeCritical]
public void WriteTo(ref WriteContext ctx, FieldCodec<T> codec)
{
if (count == 0)
{
return;
}
var writer = codec.ValueWriter;
var tag = codec.Tag;
if (codec.PackedRepeatedField)
{
// Packed primitive type
int size = CalculatePackedDataSize(codec);
ctx.WriteTag(tag);
ctx.WriteLength(size);
for (int i = 0; i < count; i++)
{
writer(ref ctx, array[i]);
}
}
else
{
// Not packed: a simple tag/value pair for each value.
// Can't use codec.WriteTagAndValue, as that omits default values.
for (int i = 0; i < count; i++)
{
ctx.WriteTag(tag);
writer(ref ctx, array[i]);
if (codec.EndTag != 0)
{
ctx.WriteTag(codec.EndTag);
}
}
}
}
/// <summary>
/// Gets and sets the capacity of the RepeatedField's internal array.
/// When set, the internal array is reallocated to the given capacity.
/// <exception cref="ArgumentOutOfRangeException">The new value is less than <see cref="Count"/>.</exception>
/// </summary>
public int Capacity
{
get { return array.Length; }
set
{
if (value < count)
{
throw new ArgumentOutOfRangeException("Capacity", value,
$"Cannot set Capacity to a value smaller than the current item count, {count}");
}
if (value >= 0 && value != array.Length)
{
SetSize(value);
}
}
}
// May increase the size of the internal array, but will never shrink it.
private void EnsureSize(int size)
{
if (array.Length < size)
{
size = Math.Max(size, MinArraySize);
int newSize = Math.Max(array.Length * 2, size);
SetSize(newSize);
}
}
// Sets the internal array to an exact size.
private void SetSize(int size)
{
if (size != array.Length)
{
var tmp = new T[size];
Array.Copy(array, 0, tmp, 0, count);
array = tmp;
}
}
/// <summary>
/// Adds the specified item to the collection.
/// </summary>
/// <param name="item">The item to add.</param>
public void Add(T item)
{
ProtoPreconditions.CheckNotNullUnconstrained(item, nameof(item));
EnsureSize(count + 1);
array[count++] = item;
}
/// <summary>
/// Removes all items from the collection.
/// </summary>
public void Clear()
{
// Clear the content of the array (so that any objects it referred to can be garbage collected)
// but keep the capacity the same. This allows large repeated fields to be reused without
// array reallocation.
Array.Clear(array, 0, count);
count = 0;
}
/// <summary>
/// Determines whether this collection contains the given item.
/// </summary>
/// <param name="item">The item to find.</param>
/// <returns><c>true</c> if this collection contains the given item; <c>false</c> otherwise.</returns>
public bool Contains(T item) => IndexOf(item) != -1;
/// <summary>
/// Copies this collection to the given array.
/// </summary>
/// <param name="array">The array to copy to.</param>
/// <param name="arrayIndex">The first index of the array to copy to.</param>
public void CopyTo(T[] array, int arrayIndex)
{
Array.Copy(this.array, 0, array, arrayIndex, count);
}
/// <summary>
/// Removes the specified item from the collection
/// </summary>
/// <param name="item">The item to remove.</param>
/// <returns><c>true</c> if the item was found and removed; <c>false</c> otherwise.</returns>
public bool Remove(T item)
{
int index = IndexOf(item);
if (index == -1)
{
return false;
}
Array.Copy(array, index + 1, array, index, count - index - 1);
count--;
array[count] = default;
return true;
}
/// <summary>
/// Gets the number of elements contained in the collection.
/// </summary>
public int Count => count;
/// <summary>
/// Gets a value indicating whether the collection is read-only.
/// </summary>
public bool IsReadOnly => false;
/// <summary>
/// Adds all of the specified values into this collection.
/// </summary>
/// <param name="values">The values to add to this collection.</param>
public void AddRange(IEnumerable<T> values)
{
ProtoPreconditions.CheckNotNull(values, nameof(values));
// Optimization 1: If the collection we're adding is already a RepeatedField<T>,
// we know the values are valid.
if (values is RepeatedField<T> otherRepeatedField)
{
EnsureSize(count + otherRepeatedField.count);
Array.Copy(otherRepeatedField.array, 0, array, count, otherRepeatedField.count);
count += otherRepeatedField.count;
return;
}
// Optimization 2: The collection is an ICollection, so we can expand
// just once and ask the collection to copy itself into the array.
if (values is ICollection collection)
{
var extraCount = collection.Count;
// For reference types and nullable value types, we need to check that there are no nulls
// present. (This isn't a thread-safe approach, but we don't advertise this is thread-safe.)
// We expect the JITter to optimize this test to true/false, so it's effectively conditional
// specialization.
if (default(T) == null)
{
// TODO: Measure whether iterating once to check and then letting the collection copy
// itself is faster or slower than iterating and adding as we go. For large
// collections this will not be great in terms of cache usage... but the optimized
// copy may be significantly faster than doing it one at a time.
foreach (var item in collection)
{
if (item == null)
{
throw new ArgumentException("Sequence contained null element", nameof(values));
}
}
}
EnsureSize(count + extraCount);
collection.CopyTo(array, count);
count += extraCount;
return;
}
// We *could* check for ICollection<T> as well, but very very few collections implement
// ICollection<T> but not ICollection. (HashSet<T> does, for one...)
// Fall back to a slower path of adding items one at a time.
foreach (T item in values)
{
Add(item);
}
}
/// <summary>
/// Adds all of the specified values into this collection. This method is present to
/// allow repeated fields to be constructed from queries within collection initializers.
/// Within non-collection-initializer code, consider using the equivalent <see cref="AddRange"/>
/// method instead for clarity.
/// </summary>
/// <param name="values">The values to add to this collection.</param>
public void Add(IEnumerable<T> values)
{
AddRange(values);
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// An enumerator that can be used to iterate through the collection.
/// </returns>
public IEnumerator<T> GetEnumerator()
{
for (int i = 0; i < count; i++)
{
yield return array[i];
}
}
/// <summary>
/// Determines whether the specified <see cref="System.Object" />, is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="System.Object" /> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="System.Object" /> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj) => Equals(obj as RepeatedField<T>);
/// <summary>
/// Returns an enumerator that iterates through a collection.
/// </summary>
/// <returns>
/// An <see cref="T:System.Collections.IEnumerator" /> object that can be used to iterate through the collection.
/// </returns>
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
/// <summary>
/// Returns a hash code for this instance.
/// </summary>
/// <returns>
/// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.
/// </returns>
public override int GetHashCode()
{
int hash = 0;
for (int i = 0; i < count; i++)
{
hash = hash * 31 + array[i].GetHashCode();
}
return hash;
}
/// <summary>
/// Compares this repeated field with another for equality.
/// </summary>
/// <param name="other">The repeated field to compare this with.</param>
/// <returns><c>true</c> if <paramref name="other"/> refers to an equal repeated field; <c>false</c> otherwise.</returns>
public bool Equals(RepeatedField<T> other)
{
if (other is null)
{
return false;
}
if (ReferenceEquals(other, this))
{
return true;
}
if (other.Count != this.Count)
{
return false;
}
EqualityComparer<T> comparer = EqualityComparer;
for (int i = 0; i < count; i++)
{
if (!comparer.Equals(array[i], other.array[i]))
{
return false;
}
}
return true;
}
/// <summary>
/// Returns the index of the given item within the collection, or -1 if the item is not
/// present.
/// </summary>
/// <param name="item">The item to find in the collection.</param>
/// <returns>The zero-based index of the item, or -1 if it is not found.</returns>
public int IndexOf(T item)
{
ProtoPreconditions.CheckNotNullUnconstrained(item, nameof(item));
EqualityComparer<T> comparer = EqualityComparer;
for (int i = 0; i < count; i++)
{
if (comparer.Equals(array[i], item))
{
return i;
}
}
return -1;
}
/// <summary>
/// Inserts the given item at the specified index.
/// </summary>
/// <param name="index">The index at which to insert the item.</param>
/// <param name="item">The item to insert.</param>
public void Insert(int index, T item)
{
ProtoPreconditions.CheckNotNullUnconstrained(item, nameof(item));
if (index < 0 || index > count)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
EnsureSize(count + 1);
Array.Copy(array, index, array, index + 1, count - index);
array[index] = item;
count++;
}
/// <summary>
/// Removes the item at the given index.
/// </summary>
/// <param name="index">The zero-based index of the item to remove.</param>
public void RemoveAt(int index)
{
if (index < 0 || index >= count)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
Array.Copy(array, index + 1, array, index, count - index - 1);
count--;
array[count] = default;
}
/// <summary>
/// Returns a string representation of this repeated field, in the same
/// way as it would be represented by the default JSON formatter.
/// </summary>
public override string ToString()
{
var writer = new StringWriter();
JsonFormatter.Default.WriteList(writer, this);
return writer.ToString();
}
/// <summary>
/// Gets or sets the item at the specified index.
/// </summary>
/// <value>
/// The element at the specified index.
/// </value>
/// <param name="index">The zero-based index of the element to get or set.</param>
/// <returns>The item at the specified index.</returns>
public T this[int index]
{
get
{
if (index < 0 || index >= count)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
return array[index];
}
set
{
if (index < 0 || index >= count)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
ProtoPreconditions.CheckNotNullUnconstrained(value, nameof(value));
array[index] = value;
}
}
#region Explicit interface implementation for IList and ICollection.
bool IList.IsFixedSize => false;
void ICollection.CopyTo(Array array, int index) => Array.Copy(this.array, 0, array, index, count);
bool ICollection.IsSynchronized => false;
object ICollection.SyncRoot => this;
object IList.this[int index]
{
get => this[index];
set => this[index] = (T)value;
}
int IList.Add(object value)
{
Add((T) value);
return count - 1;
}
bool IList.Contains(object value) => (value is T t && Contains(t));
int IList.IndexOf(object value) => (value is T t) ? IndexOf(t) : -1;
void IList.Insert(int index, object value) => Insert(index, (T) value);
void IList.Remove(object value)
{
if (value is T t)
{
Remove(t);
}
}
#endregion
private sealed class RepeatedFieldDebugView
{
private readonly RepeatedField<T> list;
public RepeatedFieldDebugView(RepeatedField<T> list)
{
this.list = list;
}
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public T[] Items => list.ToArray();
}
}
}
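A brief sketch, not part of the commit, of the behaviour documented above: null elements are rejected, AddRange takes a bulk-copy fast path for collections, and Clear keeps the capacity so large repeated fields can be reused (assumes using Google.Protobuf.Collections):
    var field = new RepeatedField<string> { "a", "b" };   // collection initializer calls Add
    field.AddRange(new[] { "c", "d" });                   // ICollection fast path: one resize plus CopyTo
    // field.Add(null);                                   // would throw: null elements are not permitted
    int index = field.IndexOf("c");                       // 2
    field.Clear();                                        // Count becomes 0...
    bool kept = field.Capacity > 0;                       // ...but the backing array is kept for reuse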

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 55cfb2cd3d75f4e419da148fa0ae66fd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 583ff6b853cb72746af3c83c8629a6bb
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,104 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
#if !NET5_0_OR_GREATER
// Copied with permission from https://github.com/dotnet/runtime/tree/8fbf206d0e518b45ca855832e8bfb391afa85972/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis
namespace System.Diagnostics.CodeAnalysis
{
/// <summary>
/// Specifies the types of members that are dynamically accessed.
///
/// This enumeration has a <see cref="FlagsAttribute"/> attribute that allows a
/// bitwise combination of its member values.
/// </summary>
[Flags]
internal enum DynamicallyAccessedMemberTypes
{
/// <summary>
/// Specifies no members.
/// </summary>
None = 0,
/// <summary>
/// Specifies the default, parameterless public constructor.
/// </summary>
PublicParameterlessConstructor = 0x0001,
/// <summary>
/// Specifies all public constructors.
/// </summary>
PublicConstructors = 0x0002 | PublicParameterlessConstructor,
/// <summary>
/// Specifies all non-public constructors.
/// </summary>
NonPublicConstructors = 0x0004,
/// <summary>
/// Specifies all public methods.
/// </summary>
PublicMethods = 0x0008,
/// <summary>
/// Specifies all non-public methods.
/// </summary>
NonPublicMethods = 0x0010,
/// <summary>
/// Specifies all public fields.
/// </summary>
PublicFields = 0x0020,
/// <summary>
/// Specifies all non-public fields.
/// </summary>
NonPublicFields = 0x0040,
/// <summary>
/// Specifies all public nested types.
/// </summary>
PublicNestedTypes = 0x0080,
/// <summary>
/// Specifies all non-public nested types.
/// </summary>
NonPublicNestedTypes = 0x0100,
/// <summary>
/// Specifies all public properties.
/// </summary>
PublicProperties = 0x0200,
/// <summary>
/// Specifies all non-public properties.
/// </summary>
NonPublicProperties = 0x0400,
/// <summary>
/// Specifies all public events.
/// </summary>
PublicEvents = 0x0800,
/// <summary>
/// Specifies all non-public events.
/// </summary>
NonPublicEvents = 0x1000,
/// <summary>
/// Specifies all interfaces implemented by the type.
/// </summary>
Interfaces = 0x2000,
/// <summary>
/// Specifies all members.
/// </summary>
All = ~None
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: de96bce00062d994091bf37244d982b1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,60 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
#if !NET5_0_OR_GREATER
// Copied with permission from https://github.com/dotnet/runtime/tree/8fbf206d0e518b45ca855832e8bfb391afa85972/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis
namespace System.Diagnostics.CodeAnalysis
{
/// <summary>
/// Indicates that certain members on a specified <see cref="Type"/> are accessed dynamically,
/// for example through <see cref="System.Reflection"/>.
/// </summary>
/// <remarks>
/// This allows tools to understand which members are being accessed during the execution
/// of a program.
///
/// This attribute is valid on members whose type is <see cref="Type"/> or <see cref="string"/>.
///
/// When this attribute is applied to a location of type <see cref="string"/>, the assumption is
/// that the string represents a fully qualified type name.
///
/// When this attribute is applied to a class, interface, or struct, the members specified
/// can be accessed dynamically on <see cref="Type"/> instances returned from calling
/// <see cref="object.GetType"/> on instances of that class, interface, or struct.
///
/// If the attribute is applied to a method it's treated as a special case and it implies
/// the attribute should be applied to the "this" parameter of the method. As such the attribute
/// should only be used on instance methods of types assignable to System.Type (or string, but no methods
/// will use it there).
/// </remarks>
[AttributeUsage(
AttributeTargets.Field | AttributeTargets.ReturnValue | AttributeTargets.GenericParameter |
AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Method |
AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.Struct,
Inherited = false)]
internal sealed class DynamicallyAccessedMembersAttribute : Attribute
{
/// <summary>
/// Initializes a new instance of the <see cref="DynamicallyAccessedMembersAttribute"/> class
/// with the specified member types.
/// </summary>
/// <param name="memberTypes">The types of members dynamically accessed.</param>
public DynamicallyAccessedMembersAttribute(DynamicallyAccessedMemberTypes memberTypes)
{
MemberTypes = memberTypes;
}
/// <summary>
/// Gets the <see cref="DynamicallyAccessedMemberTypes"/> which specifies the type
/// of members dynamically accessed.
/// </summary>
public DynamicallyAccessedMemberTypes MemberTypes { get; }
}
}
#endif
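As a rough illustration of the two polyfills above (assumed usage, mirroring how the runtime applies its own annotation, not code from this commit): the attribute is placed on a Type-typed parameter, and the flag values from the previous file are combined bitwise to tell a trimmer which member kinds to preserve.

using System;
using System.Diagnostics.CodeAnalysis;

internal static class TrimmingExample
{
    // Hypothetical helper: the annotation asks the trimmer to keep all public and
    // non-public properties of whatever type flows into this parameter.
    internal static string[] PropertyNames(
        [DynamicallyAccessedMembers(
            DynamicallyAccessedMemberTypes.PublicProperties |
            DynamicallyAccessedMemberTypes.NonPublicProperties)]
        Type type)
    {
        return Array.ConvertAll(type.GetProperties(), p => p.Name);
    }
}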

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9f420c540cf43774cbea2746e7afaeb2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,41 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System.Reflection;
namespace Google.Protobuf.Compatibility
{
/// <summary>
/// Extension methods for <see cref="PropertyInfo"/>, effectively providing
/// the familiar members from previous desktop framework versions while
/// targeting the newer releases, .NET Core etc.
/// </summary>
internal static class PropertyInfoExtensions
{
/// <summary>
/// Returns the public getter of a property, or null if there is no such getter
/// (either because it's read-only, or the getter isn't public).
/// </summary>
internal static MethodInfo GetGetMethod(this PropertyInfo target)
{
var method = target.GetMethod;
return method != null && method.IsPublic ? method : null;
}
/// <summary>
/// Returns the public setter of a property, or null if there is no such setter
/// (either because it's write-only, or the setter isn't public).
/// </summary>
internal static MethodInfo GetSetMethod(this PropertyInfo target)
{
var method = target.SetMethod;
return method != null && method.IsPublic ? method : null;
}
}
}
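A brief sketch of how these internal helpers behave (hypothetical Sample type for illustration; the extensions are only callable from inside the Google.Protobuf assembly, so the calls are shown as comments):

public class Sample
{
    public int Value { get; private set; }   // public getter, private setter
}

// Within the assembly:
//   var prop = typeof(Sample).GetTypeInfo().GetDeclaredProperty("Value");
//   PropertyInfoExtensions.GetGetMethod(prop)  // returns the public get_Value MethodInfo
//   PropertyInfoExtensions.GetSetMethod(prop)  // returns null, because the setter is not public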

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7002cae484f81924ea0a42e4ff99e150
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,49 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
#if !NET5_0_OR_GREATER
// Copied with permission from https://github.com/dotnet/runtime/tree/8fbf206d0e518b45ca855832e8bfb391afa85972/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis
namespace System.Diagnostics.CodeAnalysis
{
/// <summary>
/// Indicates that the specified method requires dynamic access to code that is not referenced
/// statically, for example through <see cref="System.Reflection"/>.
/// </summary>
/// <remarks>
/// This allows tools to understand which methods are unsafe to call when removing unreferenced
/// code from an application.
/// </remarks>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor | AttributeTargets.Class, Inherited = false)]
internal sealed class RequiresUnreferencedCodeAttribute : Attribute
{
/// <summary>
/// Initializes a new instance of the <see cref="RequiresUnreferencedCodeAttribute"/> class
/// with the specified message.
/// </summary>
/// <param name="message">
/// A message that contains information about the usage of unreferenced code.
/// </param>
public RequiresUnreferencedCodeAttribute(string message)
{
Message = message;
}
/// <summary>
/// Gets a message that contains information about the usage of unreferenced code.
/// </summary>
public string Message { get; }
/// <summary>
/// Gets or sets an optional URL that contains more information about the method,
/// why it requires unreferenced code, and what options a consumer has to deal with it.
/// </summary>
public string Url { get; set; }
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 94243b97ba117154f8bcbaddfe4cac52
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,90 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Diagnostics.CodeAnalysis;
using System.Reflection;
namespace Google.Protobuf.Compatibility
{
/// <summary>
/// Provides extension methods on Type that just proxy to TypeInfo.
/// These are used to support the new type system from .NET 4.5, without
/// having calls to GetTypeInfo all over the place. While the methods here are meant to be
/// broadly compatible with the desktop framework, there are some subtle differences in behaviour - but
/// they're not expected to affect our use cases. While the class is internal, that should be fine: we can
/// evaluate each new use appropriately.
/// </summary>
internal static class TypeExtensions
{
/// <summary>
/// See https://msdn.microsoft.com/en-us/library/system.type.isassignablefrom
/// </summary>
internal static bool IsAssignableFrom(this Type target, Type c)
{
return target.GetTypeInfo().IsAssignableFrom(c.GetTypeInfo());
}
/// <summary>
/// Returns a representation of the public property associated with the given name in the given type,
/// including inherited properties or null if there is no such public property.
/// Here, "public property" means a property where either the getter, or the setter, or both, is public.
/// </summary>
[UnconditionalSuppressMessage("Trimming", "IL2072",
Justification = "The BaseType of the target will have all properties because of the annotation.")]
internal static PropertyInfo GetProperty(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)]
this Type target, string name)
{
// GetDeclaredProperty only returns properties declared in the given type, so we need to recurse.
while (target != null)
{
var typeInfo = target.GetTypeInfo();
var ret = typeInfo.GetDeclaredProperty(name);
if (ret != null && ((ret.CanRead && ret.GetMethod.IsPublic) || (ret.CanWrite && ret.SetMethod.IsPublic)))
{
return ret;
}
target = typeInfo.BaseType;
}
return null;
}
/// <summary>
/// Returns a representation of the public method associated with the given name in the given type,
/// including inherited methods.
/// </summary>
/// <remarks>
/// This has a few differences compared with Type.GetMethod in the desktop framework. It will throw
/// if there is an ambiguous match even between a private method and a public one, but it *won't* throw
/// if there are two overloads at different levels in the type hierarchy (e.g. class Base declares public void Foo(int) and
/// class Child : Base declares public void Foo(long)).
/// </remarks>
/// <exception cref="AmbiguousMatchException">One type in the hierarchy declared more than one method with the same name</exception>
[UnconditionalSuppressMessage("Trimming", "IL2072",
Justification = "The BaseType of the target will have all properties because of the annotation.")]
internal static MethodInfo GetMethod(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods | DynamicallyAccessedMemberTypes.NonPublicMethods)]
this Type target, string name)
{
// GetDeclaredMethod only returns methods declared in the given type, so we need to recurse.
while (target != null)
{
var typeInfo = target.GetTypeInfo();
var ret = typeInfo.GetDeclaredMethod(name);
if (ret != null && ret.IsPublic)
{
return ret;
}
target = typeInfo.BaseType;
}
return null;
}
}
}
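To make the remarks on GetMethod concrete, here is a hypothetical hierarchy matching the scenario described in the XML docs above (illustration only):

public class Base
{
    public void Foo(int x) { }
}

public class Child : Base
{
    public void Foo(long x) { }
}

// Within the assembly, TypeExtensions.GetMethod(typeof(Child), "Foo") returns Child.Foo(long):
// the walk stops at the first type that declares a unique public match, so the Base overload is
// never examined. Declaring two "Foo" overloads on Child itself would instead throw
// AmbiguousMatchException, as the exception documentation above notes.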

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f34d571813f30754dbd5039e75c3e61d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,94 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
#if !NET5_0_OR_GREATER
// Copied with permission from https://github.com/dotnet/runtime/tree/8fbf206d0e518b45ca855832e8bfb391afa85972/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis
namespace System.Diagnostics.CodeAnalysis
{
/// <summary>
/// Suppresses reporting of a specific rule violation, allowing multiple suppressions on a
/// single code artifact.
/// </summary>
/// <remarks>
/// <see cref="UnconditionalSuppressMessageAttribute"/> is different than
/// <see cref="SuppressMessageAttribute"/> in that it doesn't have a
/// <see cref="ConditionalAttribute"/>. So it is always preserved in the compiled assembly.
/// </remarks>
[AttributeUsage(AttributeTargets.All, Inherited = false, AllowMultiple = true)]
internal sealed class UnconditionalSuppressMessageAttribute : Attribute
{
/// <summary>
/// Initializes a new instance of the <see cref="UnconditionalSuppressMessageAttribute"/>
/// class, specifying the category of the tool and the identifier for an analysis rule.
/// </summary>
/// <param name="category">The category for the attribute.</param>
/// <param name="checkId">The identifier of the analysis rule the attribute applies to.</param>
public UnconditionalSuppressMessageAttribute(string category, string checkId)
{
Category = category;
CheckId = checkId;
}
/// <summary>
/// Gets the category identifying the classification of the attribute.
/// </summary>
/// <remarks>
/// The <see cref="Category"/> property describes the tool or tool analysis category
/// for which a message suppression attribute applies.
/// </remarks>
public string Category { get; }
/// <summary>
/// Gets the identifier of the analysis tool rule to be suppressed.
/// </summary>
/// <remarks>
/// Concatenated together, the <see cref="Category"/> and <see cref="CheckId"/>
/// properties form a unique check identifier.
/// </remarks>
public string CheckId { get; }
/// <summary>
/// Gets or sets the scope of the code that is relevant for the attribute.
/// </summary>
/// <remarks>
/// The Scope property is an optional argument that specifies the metadata scope for which
/// the attribute is relevant.
/// </remarks>
public string Scope { get; set; }
/// <summary>
/// Gets or sets a fully qualified path that represents the target of the attribute.
/// </summary>
/// <remarks>
/// The <see cref="Target"/> property is an optional argument identifying the analysis target
/// of the attribute. An example value is "System.IO.Stream.ctor():System.Void".
/// Because it is fully qualified, it can be long, particularly for targets such as parameters.
/// The analysis tool user interface should be capable of automatically formatting the parameter.
/// </remarks>
public string Target { get; set; }
/// <summary>
/// Gets or sets an optional argument expanding on exclusion criteria.
/// </summary>
/// <remarks>
/// The <see cref="MessageId"/> property is an optional argument that specifies additional
/// exclusion where the literal metadata target is not sufficiently precise. For example,
/// the <see cref="UnconditionalSuppressMessageAttribute"/> cannot be applied within a method,
/// and it may be desirable to suppress a violation against a statement in the method that will
/// give a rule violation, but not against all statements in the method.
/// </remarks>
public string MessageId { get; set; }
/// <summary>
/// Gets or sets the justification for suppressing the code analysis message.
/// </summary>
public string Justification { get; set; }
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: feb4123b3282a414c90d6681fae27237
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 90b05e59ab60a3c45ac3cd31a020e94d
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4a3c61a5c6e34a04eb99c780ac05b978
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,96 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
/// <summary>
/// Represents a non-generic extension definition. This API is experimental and subject to change.
/// </summary>
public abstract class Extension
{
internal abstract Type TargetType { get; }
/// <summary>
/// Internal use. Creates a new extension with the specified field number.
/// </summary>
protected Extension(int fieldNumber)
{
FieldNumber = fieldNumber;
}
internal abstract IExtensionValue CreateValue();
/// <summary>
/// Gets the field number of this extension
/// </summary>
public int FieldNumber { get; }
internal abstract bool IsRepeated { get; }
}
/// <summary>
/// Represents a type-safe extension identifier used for getting and setting single extension values in <see cref="IExtendableMessage{T}"/> instances.
/// This API is experimental and subject to change.
/// </summary>
/// <typeparam name="TTarget">The message type this field applies to</typeparam>
/// <typeparam name="TValue">The field value type of this extension</typeparam>
public sealed class Extension<TTarget, TValue> : Extension where TTarget : IExtendableMessage<TTarget>
{
private readonly FieldCodec<TValue> codec;
/// <summary>
/// Creates a new extension identifier with the specified field number and codec
/// </summary>
public Extension(int fieldNumber, FieldCodec<TValue> codec) : base(fieldNumber)
{
this.codec = codec;
}
internal TValue DefaultValue => codec != null ? codec.DefaultValue : default;
internal override Type TargetType => typeof(TTarget);
internal override bool IsRepeated => false;
internal override IExtensionValue CreateValue()
{
return new ExtensionValue<TValue>(codec);
}
}
/// <summary>
/// Represents a type-safe extension identifier used for getting repeated extension values in <see cref="IExtendableMessage{T}"/> instances.
/// This API is experimental and subject to change.
/// </summary>
/// <typeparam name="TTarget">The message type this field applies to</typeparam>
/// <typeparam name="TValue">The repeated field value type of this extension</typeparam>
public sealed class RepeatedExtension<TTarget, TValue> : Extension where TTarget : IExtendableMessage<TTarget>
{
private readonly FieldCodec<TValue> codec;
/// <summary>
/// Creates a new repeated extension identifier with the specified field number and codec
/// </summary>
public RepeatedExtension(int fieldNumber, FieldCodec<TValue> codec) : base(fieldNumber)
{
this.codec = codec;
}
internal override Type TargetType => typeof(TTarget);
internal override bool IsRepeated => true;
internal override IExtensionValue CreateValue()
{
return new RepeatedExtensionValue<TValue>(codec);
}
}
}
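As a hedged sketch (not part of this commit), an extension identifier can be declared by hand the same way generated code does it, pairing a field number with a FieldCodec; the raw tag passed to the codec is (fieldNumber << 3) | wireType. "MyMessage" is a placeholder for a generated proto2 message implementing IExtendableMessage<MyMessage>.

using Google.Protobuf;

public static class MyExtensions
{
    // optional int32 weight = 1000;  -> tag = (1000 << 3) | 0 (varint) = 8000
    public static readonly Extension<MyMessage, int> Weight =
        new Extension<MyMessage, int>(1000, FieldCodec.ForInt32(8000));

    // repeated string tags = 1001;   -> tag = (1001 << 3) | 2 (length-delimited) = 8010
    public static readonly RepeatedExtension<MyMessage, string> Tags =
        new RepeatedExtension<MyMessage, string>(1001, FieldCodec.ForString(8010));
}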

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c0f4f2e1ac71991488403d126ab9954f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,161 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace Google.Protobuf
{
/// <summary>
/// Provides extensions to messages while parsing. This API is experimental and subject to change.
/// </summary>
public sealed class ExtensionRegistry : ICollection<Extension>, IDeepCloneable<ExtensionRegistry>
{
internal sealed class ExtensionComparer : IEqualityComparer<Extension>
{
public bool Equals(Extension a, Extension b)
{
return new ObjectIntPair<Type>(a.TargetType, a.FieldNumber).Equals(new ObjectIntPair<Type>(b.TargetType, b.FieldNumber));
}
public int GetHashCode(Extension a)
{
return new ObjectIntPair<Type>(a.TargetType, a.FieldNumber).GetHashCode();
}
internal static ExtensionComparer Instance = new ExtensionComparer();
}
private readonly IDictionary<ObjectIntPair<Type>, Extension> extensions;
/// <summary>
/// Creates a new empty extension registry
/// </summary>
public ExtensionRegistry()
{
extensions = new Dictionary<ObjectIntPair<Type>, Extension>();
}
private ExtensionRegistry(IDictionary<ObjectIntPair<Type>, Extension> collection)
{
extensions = collection.ToDictionary(k => k.Key, v => v.Value);
}
/// <summary>
/// Gets the total number of extensions in this extension registry
/// </summary>
public int Count => extensions.Count;
/// <summary>
/// Returns whether the registry is readonly
/// </summary>
bool ICollection<Extension>.IsReadOnly => false;
internal bool ContainsInputField(uint lastTag, Type target, out Extension extension)
{
return extensions.TryGetValue(new ObjectIntPair<Type>(target, WireFormat.GetTagFieldNumber(lastTag)), out extension);
}
/// <summary>
/// Adds the specified extension to the registry
/// </summary>
public void Add(Extension extension)
{
ProtoPreconditions.CheckNotNull(extension, nameof(extension));
extensions.Add(new ObjectIntPair<Type>(extension.TargetType, extension.FieldNumber), extension);
}
/// <summary>
/// Adds the specified extensions to the registry
/// </summary>
public void AddRange(IEnumerable<Extension> extensions)
{
ProtoPreconditions.CheckNotNull(extensions, nameof(extensions));
foreach (var extension in extensions)
{
Add(extension);
}
}
/// <summary>
/// Clears the registry of all values
/// </summary>
public void Clear()
{
extensions.Clear();
}
/// <summary>
/// Gets whether the extension registry contains the specified extension
/// </summary>
public bool Contains(Extension item)
{
ProtoPreconditions.CheckNotNull(item, nameof(item));
return extensions.ContainsKey(new ObjectIntPair<Type>(item.TargetType, item.FieldNumber));
}
/// <summary>
/// Copies the extensions in the registry to the specified array, starting at the specified index
/// </summary>
/// <param name="array">The array to copy to</param>
/// <param name="arrayIndex">The array index to start at</param>
void ICollection<Extension>.CopyTo(Extension[] array, int arrayIndex)
{
ProtoPreconditions.CheckNotNull(array, nameof(array));
if (arrayIndex < 0 || arrayIndex >= array.Length)
{
throw new ArgumentOutOfRangeException(nameof(arrayIndex));
}
if (array.Length - arrayIndex < Count)
{
throw new ArgumentException("The provided array is shorter than the number of elements in the registry");
}
foreach (Extension extension in extensions.Values)
{
    array[arrayIndex++] = extension;
}
}
/// <summary>
/// Returns an enumerator to enumerate through the items in the registry
/// </summary>
/// <returns>Returns an enumerator for the extensions in this registry</returns>
public IEnumerator<Extension> GetEnumerator()
{
return extensions.Values.GetEnumerator();
}
/// <summary>
/// Removes the specified extension from the set
/// </summary>
/// <param name="item">The extension</param>
/// <returns><c>true</c> if the extension was removed, otherwise <c>false</c></returns>
public bool Remove(Extension item)
{
ProtoPreconditions.CheckNotNull(item, nameof(item));
return extensions.Remove(new ObjectIntPair<Type>(item.TargetType, item.FieldNumber));
}
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
/// <summary>
/// Clones the registry into a new registry
/// </summary>
public ExtensionRegistry Clone()
{
return new ExtensionRegistry(extensions);
}
}
}
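A hedged usage sketch for the registry (reusing the hypothetical MyMessage / MyExtensions from the previous note): extensions must be registered before parsing, otherwise the parser stores those fields as unknown fields rather than decoded extension values.

using Google.Protobuf;

internal static class RegistryExample
{
    internal static int ReadWeight(byte[] payload)
    {
        var registry = new ExtensionRegistry { MyExtensions.Weight, MyExtensions.Tags };
        // WithExtensionRegistry returns a parser that consults the registry while merging.
        var parser = MyMessage.Parser.WithExtensionRegistry(registry);
        MyMessage parsed = parser.ParseFrom(payload);
        return parsed.GetExtension(MyExtensions.Weight);
    }
}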

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ef4f85bd14338fd4cbaf283ca0f4a48e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,398 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Collections;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Methods for managing <see cref="ExtensionSet{TTarget}"/>s with null checking.
///
/// Most users will not use this class directly and its API is experimental and subject to change.
/// </summary>
public static class ExtensionSet
{
private static bool TryGetValue<TTarget>(ref ExtensionSet<TTarget> set, Extension extension, out IExtensionValue value) where TTarget : IExtendableMessage<TTarget>
{
if (set == null)
{
value = null;
return false;
}
return set.ValuesByNumber.TryGetValue(extension.FieldNumber, out value);
}
/// <summary>
/// Gets the value of the specified extension
/// </summary>
public static TValue Get<TTarget, TValue>(ref ExtensionSet<TTarget> set, Extension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
if (TryGetValue(ref set, extension, out IExtensionValue value))
{
// The stored ExtensionValue can be a different type to what is being requested.
// This happens when the same extension proto is compiled in different assemblies.
// To allow consuming assemblies to still get the value when the TValue type is
// different, this get method:
// 1. Attempts to cast the value to the expected ExtensionValue<TValue>.
// This is the usual case. It is used first because it avoids possibly boxing the value.
// 2. Fallback to get the value as object from IExtensionValue then casting.
// This allows for someone to specify a TValue of object. They can then convert
// the values to bytes and reparse them using the expected type.
// 3. If neither of these work, throw a user friendly error that the types aren't compatible.
if (value is ExtensionValue<TValue> extensionValue)
{
return extensionValue.GetValue();
}
else if (value.GetValue() is TValue underlyingValue)
{
return underlyingValue;
}
else
{
var valueType = value.GetType().GetTypeInfo();
if (valueType.IsGenericType && valueType.GetGenericTypeDefinition() == typeof(ExtensionValue<>))
{
var storedType = valueType.GenericTypeArguments[0];
throw new InvalidOperationException(
    "The stored extension value has a type of '" + storedType.AssemblyQualifiedName + "'. " +
    "This is different from the requested type of '" + typeof(TValue).AssemblyQualifiedName + "'.");
}
else
{
throw new InvalidOperationException("Unexpected extension value type: " + valueType.AssemblyQualifiedName);
}
}
}
else
{
return extension.DefaultValue;
}
}
/// <summary>
/// Gets the value of the specified repeated extension or null if it doesn't exist in this set
/// </summary>
public static RepeatedField<TValue> Get<TTarget, TValue>(ref ExtensionSet<TTarget> set, RepeatedExtension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
if (TryGetValue(ref set, extension, out IExtensionValue value))
{
if (value is RepeatedExtensionValue<TValue> extensionValue)
{
return extensionValue.GetValue();
}
else
{
var valueType = value.GetType().GetTypeInfo();
if (valueType.IsGenericType && valueType.GetGenericTypeDefinition() == typeof(RepeatedExtensionValue<>))
{
var storedType = valueType.GenericTypeArguments[0];
throw new InvalidOperationException(
    "The stored extension value has a type of '" + storedType.AssemblyQualifiedName + "'. " +
    "This is different from the requested type of '" + typeof(TValue).AssemblyQualifiedName + "'.");
}
else
{
throw new InvalidOperationException("Unexpected extension value type: " + valueType.AssemblyQualifiedName);
}
}
}
else
{
return null;
}
}
/// <summary>
/// Gets the value of the specified repeated extension, registering it if it doesn't exist
/// </summary>
public static RepeatedField<TValue> GetOrInitialize<TTarget, TValue>(ref ExtensionSet<TTarget> set, RepeatedExtension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
IExtensionValue value;
if (set == null)
{
value = extension.CreateValue();
set = new ExtensionSet<TTarget>();
set.ValuesByNumber.Add(extension.FieldNumber, value);
}
else
{
if (!set.ValuesByNumber.TryGetValue(extension.FieldNumber, out value))
{
value = extension.CreateValue();
set.ValuesByNumber.Add(extension.FieldNumber, value);
}
}
return ((RepeatedExtensionValue<TValue>)value).GetValue();
}
/// <summary>
/// Sets the value of the specified extension. This will make a new instance of ExtensionSet if the set is null.
/// </summary>
public static void Set<TTarget, TValue>(ref ExtensionSet<TTarget> set, Extension<TTarget, TValue> extension, TValue value) where TTarget : IExtendableMessage<TTarget>
{
ProtoPreconditions.CheckNotNullUnconstrained(value, nameof(value));
IExtensionValue extensionValue;
if (set == null)
{
extensionValue = extension.CreateValue();
set = new ExtensionSet<TTarget>();
set.ValuesByNumber.Add(extension.FieldNumber, extensionValue);
}
else
{
if (!set.ValuesByNumber.TryGetValue(extension.FieldNumber, out extensionValue))
{
extensionValue = extension.CreateValue();
set.ValuesByNumber.Add(extension.FieldNumber, extensionValue);
}
}
((ExtensionValue<TValue>)extensionValue).SetValue(value);
}
/// <summary>
/// Gets whether the value of the specified extension is set
/// </summary>
public static bool Has<TTarget, TValue>(ref ExtensionSet<TTarget> set, Extension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
return TryGetValue(ref set, extension, out IExtensionValue _);
}
/// <summary>
/// Clears the value of the specified extension
/// </summary>
public static void Clear<TTarget, TValue>(ref ExtensionSet<TTarget> set, Extension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
if (set == null)
{
return;
}
set.ValuesByNumber.Remove(extension.FieldNumber);
if (set.ValuesByNumber.Count == 0)
{
set = null;
}
}
/// <summary>
/// Clears the value of the specified extension
/// </summary>
public static void Clear<TTarget, TValue>(ref ExtensionSet<TTarget> set, RepeatedExtension<TTarget, TValue> extension) where TTarget : IExtendableMessage<TTarget>
{
if (set == null)
{
return;
}
set.ValuesByNumber.Remove(extension.FieldNumber);
if (set.ValuesByNumber.Count == 0)
{
set = null;
}
}
/// <summary>
/// Tries to merge a field from the coded input, returning true if the field was merged.
/// If the set is null or the field was not otherwise merged, this returns false.
/// </summary>
public static bool TryMergeFieldFrom<TTarget>(ref ExtensionSet<TTarget> set, CodedInputStream stream) where TTarget : IExtendableMessage<TTarget>
{
ParseContext.Initialize(stream, out ParseContext ctx);
try
{
return TryMergeFieldFrom<TTarget>(ref set, ref ctx);
}
finally
{
ctx.CopyStateTo(stream);
}
}
/// <summary>
/// Tries to merge a field from the coded input, returning true if the field was merged.
/// If the set is null or the field was not otherwise merged, this returns false.
/// </summary>
public static bool TryMergeFieldFrom<TTarget>(ref ExtensionSet<TTarget> set, ref ParseContext ctx) where TTarget : IExtendableMessage<TTarget>
{
int lastFieldNumber = WireFormat.GetTagFieldNumber(ctx.LastTag);
if (set != null && set.ValuesByNumber.TryGetValue(lastFieldNumber, out IExtensionValue extensionValue))
{
extensionValue.MergeFrom(ref ctx);
return true;
}
else if (ctx.ExtensionRegistry != null && ctx.ExtensionRegistry.ContainsInputField(ctx.LastTag, typeof(TTarget), out Extension extension))
{
IExtensionValue value = extension.CreateValue();
value.MergeFrom(ref ctx);
set ??= new ExtensionSet<TTarget>();
set.ValuesByNumber.Add(extension.FieldNumber, value);
return true;
}
else
{
return false;
}
}
/// <summary>
/// Merges the second set into the first set, creating a new instance if first is null
/// </summary>
public static void MergeFrom<TTarget>(ref ExtensionSet<TTarget> first, ExtensionSet<TTarget> second) where TTarget : IExtendableMessage<TTarget>
{
if (second == null)
{
return;
}
if (first == null)
{
first = new ExtensionSet<TTarget>();
}
foreach (var pair in second.ValuesByNumber)
{
if (first.ValuesByNumber.TryGetValue(pair.Key, out IExtensionValue value))
{
value.MergeFrom(pair.Value);
}
else
{
var cloned = pair.Value.Clone();
first.ValuesByNumber[pair.Key] = cloned;
}
}
}
/// <summary>
/// Clones the set into a new set. If the set is null, this returns null
/// </summary>
public static ExtensionSet<TTarget> Clone<TTarget>(ExtensionSet<TTarget> set) where TTarget : IExtendableMessage<TTarget>
{
if (set == null)
{
return null;
}
var newSet = new ExtensionSet<TTarget>();
foreach (var pair in set.ValuesByNumber)
{
var cloned = pair.Value.Clone();
newSet.ValuesByNumber[pair.Key] = cloned;
}
return newSet;
}
}
/// <summary>
/// Used for keeping track of extensions in messages.
/// <see cref="IExtendableMessage{T}"/> methods route to this set.
///
/// Most users will not need to use this class directly
/// </summary>
/// <typeparam name="TTarget">The message type that extensions in this set target</typeparam>
public sealed class ExtensionSet<TTarget> where TTarget : IExtendableMessage<TTarget>
{
internal Dictionary<int, IExtensionValue> ValuesByNumber { get; } = new Dictionary<int, IExtensionValue>();
/// <summary>
/// Gets a hash code of the set
/// </summary>
public override int GetHashCode()
{
int ret = typeof(TTarget).GetHashCode();
foreach (KeyValuePair<int, IExtensionValue> field in ValuesByNumber)
{
// Use ^ here to make the field order irrelevant.
int hash = field.Key.GetHashCode() ^ field.Value.GetHashCode();
ret ^= hash;
}
return ret;
}
/// <summary>
/// Returns whether this set is equal to the other object
/// </summary>
public override bool Equals(object other)
{
if (ReferenceEquals(this, other))
{
return true;
}
ExtensionSet<TTarget> otherSet = other as ExtensionSet<TTarget>;
if (otherSet == null || ValuesByNumber.Count != otherSet.ValuesByNumber.Count)
{
    return false;
}
foreach (var pair in ValuesByNumber)
{
if (!otherSet.ValuesByNumber.TryGetValue(pair.Key, out IExtensionValue secondValue))
{
return false;
}
if (!pair.Value.Equals(secondValue))
{
return false;
}
}
return true;
}
/// <summary>
/// Calculates the size of this extension set
/// </summary>
public int CalculateSize()
{
int size = 0;
foreach (var value in ValuesByNumber.Values)
{
size += value.CalculateSize();
}
return size;
}
/// <summary>
/// Writes the extension values in this set to the output stream
/// </summary>
public void WriteTo(CodedOutputStream stream)
{
WriteContext.Initialize(stream, out WriteContext ctx);
try
{
WriteTo(ref ctx);
}
finally
{
ctx.CopyStateTo(stream);
}
}
/// <summary>
/// Writes the extension values in this set to the write context
/// </summary>
[SecuritySafeCritical]
public void WriteTo(ref WriteContext ctx)
{
foreach (var value in ValuesByNumber.Values)
{
value.WriteTo(ref ctx);
}
}
internal bool IsInitialized()
{
return ValuesByNumber.Values.All(v => v.IsInitialized());
}
}
}
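A short, hedged sketch of the behaviour documented above, driven through the IExtendableMessage<T> surface that routes into this set (again using the hypothetical MyMessage / MyExtensions):

var msg = new MyMessage();
bool has = msg.HasExtension(MyExtensions.Weight);    // false: the backing ExtensionSet is still null
msg.SetExtension(MyExtensions.Weight, 42);           // Set(...) lazily allocates the ExtensionSet
int weight = msg.GetExtension(MyExtensions.Weight);  // 42
msg.ClearExtension(MyExtensions.Weight);             // Clear(...) drops the entry; an emptied set becomes null again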

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 33d0f348de0f5e04092d3d0661a60e2a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,206 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Collections;
using System;
namespace Google.Protobuf
{
internal interface IExtensionValue : IEquatable<IExtensionValue>, IDeepCloneable<IExtensionValue>
{
void MergeFrom(ref ParseContext ctx);
void MergeFrom(IExtensionValue value);
void WriteTo(ref WriteContext ctx);
int CalculateSize();
bool IsInitialized();
object GetValue();
}
internal sealed class ExtensionValue<T> : IExtensionValue
{
private T field;
private readonly FieldCodec<T> codec;
internal ExtensionValue(FieldCodec<T> codec)
{
this.codec = codec;
field = codec.DefaultValue;
}
public int CalculateSize()
{
return codec.CalculateUnconditionalSizeWithTag(field);
}
public IExtensionValue Clone()
{
return new ExtensionValue<T>(codec)
{
field = field is IDeepCloneable<T> ? (field as IDeepCloneable<T>).Clone() : field
};
}
public bool Equals(IExtensionValue other)
{
if (ReferenceEquals(this, other))
return true;
return other is ExtensionValue<T>
&& codec.Equals((other as ExtensionValue<T>).codec)
&& Equals(field, (other as ExtensionValue<T>).field);
// We also check for equality in the codec: two fields could hold equal values and yet be written in different ways.
}
public override int GetHashCode()
{
unchecked
{
int hash = 17;
hash = hash * 31 + field.GetHashCode();
hash = hash * 31 + codec.GetHashCode();
return hash;
}
}
public void MergeFrom(ref ParseContext ctx)
{
codec.ValueMerger(ref ctx, ref field);
}
public void MergeFrom(IExtensionValue value)
{
if (value is ExtensionValue<T>)
{
var extensionValue = value as ExtensionValue<T>;
codec.FieldMerger(ref field, extensionValue.field);
}
}
public void WriteTo(ref WriteContext ctx)
{
ctx.WriteTag(codec.Tag);
codec.ValueWriter(ref ctx, field);
if (codec.EndTag != 0)
{
ctx.WriteTag(codec.EndTag);
}
}
public T GetValue() => field;
object IExtensionValue.GetValue() => field;
public void SetValue(T value)
{
field = value;
}
public bool IsInitialized()
{
if (field is IMessage)
{
return (field as IMessage).IsInitialized();
}
else
{
return true;
}
}
}
internal sealed class RepeatedExtensionValue<T> : IExtensionValue
{
private RepeatedField<T> field;
private readonly FieldCodec<T> codec;
internal RepeatedExtensionValue(FieldCodec<T> codec)
{
this.codec = codec;
field = new RepeatedField<T>();
}
public int CalculateSize()
{
return field.CalculateSize(codec);
}
public IExtensionValue Clone()
{
return new RepeatedExtensionValue<T>(codec)
{
field = field.Clone()
};
}
public bool Equals(IExtensionValue other)
{
if (ReferenceEquals(this, other))
return true;
return other is RepeatedExtensionValue<T>
&& field.Equals((other as RepeatedExtensionValue<T>).field)
&& codec.Equals((other as RepeatedExtensionValue<T>).codec);
}
public override int GetHashCode()
{
unchecked
{
int hash = 17;
hash = hash * 31 + field.GetHashCode();
hash = hash * 31 + codec.GetHashCode();
return hash;
}
}
public void MergeFrom(ref ParseContext ctx)
{
field.AddEntriesFrom(ref ctx, codec);
}
public void MergeFrom(IExtensionValue value)
{
if (value is RepeatedExtensionValue<T>)
{
field.Add((value as RepeatedExtensionValue<T>).field);
}
}
public void WriteTo(ref WriteContext ctx)
{
field.WriteTo(ref ctx, codec);
}
public RepeatedField<T> GetValue() => field;
object IExtensionValue.GetValue() => field;
public bool IsInitialized()
{
for (int i = 0; i < field.Count; i++)
{
var element = field[i];
if (element is IMessage)
{
if (!(element as IMessage).IsInitialized())
{
return false;
}
}
else
{
break;
}
}
return true;
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c7e3e0891dbb4ef4795fd30b9ae20e20
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,814 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Collections;
using Google.Protobuf.WellKnownTypes;
using System;
using System.Collections.Generic;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Factory methods for <see cref="FieldCodec{T}"/>.
/// </summary>
public static class FieldCodec
{
// TODO: Avoid the "dual hit" of lambda expressions: create open delegates instead. (At least test...)
/// <summary>
/// Retrieves a codec suitable for a string field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<string> ForString(uint tag) => ForString(tag, "");
/// <summary>
/// Retrieves a codec suitable for a bytes field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ByteString> ForBytes(uint tag) => ForBytes(tag, ByteString.Empty);
/// <summary>
/// Retrieves a codec suitable for a bool field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<bool> ForBool(uint tag) => ForBool(tag, false);
/// <summary>
/// Retrieves a codec suitable for an int32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForInt32(uint tag) => ForInt32(tag, 0);
/// <summary>
/// Retrieves a codec suitable for an sint32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForSInt32(uint tag) => ForSInt32(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a fixed32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<uint> ForFixed32(uint tag) => ForFixed32(tag, 0);
/// <summary>
/// Retrieves a codec suitable for an sfixed32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForSFixed32(uint tag) => ForSFixed32(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a uint32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<uint> ForUInt32(uint tag) => ForUInt32(tag, 0);
/// <summary>
/// Retrieves a codec suitable for an int64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForInt64(uint tag) => ForInt64(tag, 0);
/// <summary>
/// Retrieves a codec suitable for an sint64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForSInt64(uint tag) => ForSInt64(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a fixed64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ulong> ForFixed64(uint tag) => ForFixed64(tag, 0);
/// <summary>
/// Retrieves a codec suitable for an sfixed64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForSFixed64(uint tag) => ForSFixed64(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a uint64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ulong> ForUInt64(uint tag) => ForUInt64(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a float field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<float> ForFloat(uint tag) => ForFloat(tag, 0);
/// <summary>
/// Retrieves a codec suitable for a double field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<double> ForDouble(uint tag) => ForDouble(tag, 0);
// Enums are tricky. We can probably use expression trees to build these delegates automatically,
// but it's easy to generate the code for it.
/// <summary>
/// Retrieves a codec suitable for an enum field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="toInt32">A conversion function from <see cref="Int32"/> to the enum type.</param>
/// <param name="fromInt32">A conversion function from the enum type to <see cref="Int32"/>.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<T> ForEnum<T>(uint tag, Func<T, int> toInt32, Func<int, T> fromInt32) =>
ForEnum(tag, toInt32, fromInt32, default);
/// <summary>
/// Retrieves a codec suitable for a string field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<string> ForString(uint tag, string defaultValue)
{
return new FieldCodec<string>((ref ParseContext ctx) => ctx.ReadString(), (ref WriteContext ctx, string value) => ctx.WriteString(value), CodedOutputStream.ComputeStringSize, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a bytes field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ByteString> ForBytes(uint tag, ByteString defaultValue)
{
return new FieldCodec<ByteString>((ref ParseContext ctx) => ctx.ReadBytes(), (ref WriteContext ctx, ByteString value) => ctx.WriteBytes(value), CodedOutputStream.ComputeBytesSize, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a bool field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<bool> ForBool(uint tag, bool defaultValue)
{
return new FieldCodec<bool>((ref ParseContext ctx) => ctx.ReadBool(), (ref WriteContext ctx, bool value) => ctx.WriteBool(value), CodedOutputStream.BoolSize, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an int32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForInt32(uint tag, int defaultValue)
{
return new FieldCodec<int>((ref ParseContext ctx) => ctx.ReadInt32(), (ref WriteContext output, int value) => output.WriteInt32(value), CodedOutputStream.ComputeInt32Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an sint32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForSInt32(uint tag, int defaultValue)
{
return new FieldCodec<int>((ref ParseContext ctx) => ctx.ReadSInt32(), (ref WriteContext output, int value) => output.WriteSInt32(value), CodedOutputStream.ComputeSInt32Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a fixed32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<uint> ForFixed32(uint tag, uint defaultValue)
{
return new FieldCodec<uint>((ref ParseContext ctx) => ctx.ReadFixed32(), (ref WriteContext output, uint value) => output.WriteFixed32(value), 4, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an sfixed32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<int> ForSFixed32(uint tag, int defaultValue)
{
return new FieldCodec<int>((ref ParseContext ctx) => ctx.ReadSFixed32(), (ref WriteContext output, int value) => output.WriteSFixed32(value), 4, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a uint32 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<uint> ForUInt32(uint tag, uint defaultValue)
{
return new FieldCodec<uint>((ref ParseContext ctx) => ctx.ReadUInt32(), (ref WriteContext output, uint value) => output.WriteUInt32(value), CodedOutputStream.ComputeUInt32Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an int64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForInt64(uint tag, long defaultValue)
{
return new FieldCodec<long>((ref ParseContext ctx) => ctx.ReadInt64(), (ref WriteContext output, long value) => output.WriteInt64(value), CodedOutputStream.ComputeInt64Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an sint64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForSInt64(uint tag, long defaultValue)
{
return new FieldCodec<long>((ref ParseContext ctx) => ctx.ReadSInt64(), (ref WriteContext output, long value) => output.WriteSInt64(value), CodedOutputStream.ComputeSInt64Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a fixed64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ulong> ForFixed64(uint tag, ulong defaultValue)
{
return new FieldCodec<ulong>((ref ParseContext ctx) => ctx.ReadFixed64(), (ref WriteContext output, ulong value) => output.WriteFixed64(value), 8, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for an sfixed64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<long> ForSFixed64(uint tag, long defaultValue)
{
return new FieldCodec<long>((ref ParseContext ctx) => ctx.ReadSFixed64(), (ref WriteContext output, long value) => output.WriteSFixed64(value), 8, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a uint64 field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<ulong> ForUInt64(uint tag, ulong defaultValue)
{
return new FieldCodec<ulong>((ref ParseContext ctx) => ctx.ReadUInt64(), (ref WriteContext output, ulong value) => output.WriteUInt64(value), CodedOutputStream.ComputeUInt64Size, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a float field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<float> ForFloat(uint tag, float defaultValue)
{
return new FieldCodec<float>((ref ParseContext ctx) => ctx.ReadFloat(), (ref WriteContext output, float value) => output.WriteFloat(value), CodedOutputStream.FloatSize, tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a double field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<double> ForDouble(uint tag, double defaultValue)
{
return new FieldCodec<double>((ref ParseContext ctx) => ctx.ReadDouble(), (ref WriteContext output, double value) => output.WriteDouble(value), CodedOutputStream.DoubleSize, tag, defaultValue);
}
// Enums are tricky. We can probably use expression trees to build these delegates automatically,
// but it's easy to generate the code for it.
/// <summary>
/// Retrieves a codec suitable for an enum field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="toInt32">A conversion function from <see cref="Int32"/> to the enum type.</param>
/// <param name="fromInt32">A conversion function from the enum type to <see cref="Int32"/>.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<T> ForEnum<T>(uint tag, Func<T, int> toInt32, Func<int, T> fromInt32, T defaultValue)
{
return new FieldCodec<T>((ref ParseContext ctx) => fromInt32(
ctx.ReadEnum()),
(ref WriteContext output, T value) => output.WriteEnum(toInt32(value)),
value => CodedOutputStream.ComputeEnumSize(toInt32(value)), tag, defaultValue);
}
/// <summary>
/// Retrieves a codec suitable for a message field with the given tag.
/// </summary>
/// <param name="tag">The tag.</param>
/// <param name="parser">A parser to use for the message type.</param>
/// <returns>A codec for the given tag.</returns>
public static FieldCodec<T> ForMessage<T>(uint tag, MessageParser<T> parser) where T : class, IMessage<T>
{
return new FieldCodec<T>(
(ref ParseContext ctx) =>
{
T message = parser.CreateTemplate();
ctx.ReadMessage(message);
return message;
},
(ref WriteContext output, T value) => output.WriteMessage(value),
(ref ParseContext ctx, ref T v) =>
{
if (v == null)
{
v = parser.CreateTemplate();
}
ctx.ReadMessage(v);
},
(ref T v, T v2) =>
{
if (v2 == null)
{
return false;
}
else if (v == null)
{
v = v2.Clone();
}
else
{
v.MergeFrom(v2);
}
return true;
},
message => CodedOutputStream.ComputeMessageSize(message), tag);
}
/// <summary>
/// Retrieves a codec suitable for a group field with the given tag.
/// </summary>
/// <param name="startTag">The start group tag.</param>
/// <param name="endTag">The end group tag.</param>
/// <param name="parser">A parser to use for the group message type.</param>
/// <returns>A codec for given tag</returns>
public static FieldCodec<T> ForGroup<T>(uint startTag, uint endTag, MessageParser<T> parser) where T : class, IMessage<T>
{
return new FieldCodec<T>(
(ref ParseContext ctx) =>
{
T message = parser.CreateTemplate();
ctx.ReadGroup(message);
return message;
},
(ref WriteContext output, T value) => output.WriteGroup(value),
(ref ParseContext ctx, ref T v) =>
{
if (v == null)
{
v = parser.CreateTemplate();
}
ctx.ReadGroup(v);
},
(ref T v, T v2) =>
{
if (v2 == null)
{
return v == null;
}
else if (v == null)
{
v = v2.Clone();
}
else
{
v.MergeFrom(v2);
}
return true;
},
message => CodedOutputStream.ComputeGroupSize(message), startTag, endTag);
}
/// <summary>
/// Creates a codec for a wrapper type of a class - which must be string or ByteString.
/// </summary>
public static FieldCodec<T> ForClassWrapper<T>(uint tag) where T : class
{
var nestedCodec = WrapperCodecs.GetCodec<T>();
return new FieldCodec<T>(
(ref ParseContext ctx) => WrapperCodecs.Read<T>(ref ctx, nestedCodec),
(ref WriteContext output, T value) => WrapperCodecs.Write<T>(ref output, value, nestedCodec),
(ref ParseContext ctx, ref T v) => v = WrapperCodecs.Read<T>(ref ctx, nestedCodec),
(ref T v, T v2) => { v = v2; return v == null; },
value => WrapperCodecs.CalculateSize<T>(value, nestedCodec),
tag, 0,
null); // Default value for the wrapper
}
/// <summary>
/// Creates a codec for a wrapper type of a struct - which must be Int32, Int64, UInt32, UInt64,
/// Bool, Single or Double.
/// </summary>
public static FieldCodec<T?> ForStructWrapper<T>(uint tag) where T : struct
{
var nestedCodec = WrapperCodecs.GetCodec<T>();
return new FieldCodec<T?>(
WrapperCodecs.GetReader<T>(),
(ref WriteContext output, T? value) => WrapperCodecs.Write<T>(ref output, value.Value, nestedCodec),
(ref ParseContext ctx, ref T? v) => v = WrapperCodecs.Read<T>(ref ctx, nestedCodec),
(ref T? v, T? v2) => { if (v2.HasValue) { v = v2; } return v.HasValue; },
value => value == null ? 0 : WrapperCodecs.CalculateSize<T>(value.Value, nestedCodec),
tag, 0,
null); // Default value for the wrapper
}
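// Illustrative sketch (not part of the original source): a field of the well-known type
// google.protobuf.Int32Value surfaces as int? and uses a struct-wrapper codec. For field
// number 1 with the length-delimited wire type (tag (1 << 3) | 2 == 10):
//
//     FieldCodec<int?> codec = FieldCodec.ForStructWrapper<int>(10);
//     // null is treated as "absent" and nothing is written; a non-null value is written
//     // as a nested wrapper message containing a single varint field.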
/// <summary>
/// Helper code to create codecs for wrapper types.
/// </summary>
/// <remarks>
/// Somewhat ugly with all the static methods, but the conversions involved to/from nullable types make it
/// slightly tricky to improve. So long as we keep the public API (ForClassWrapper, ForStructWrapper) in place,
/// we can refactor later if we come up with something cleaner.
/// </remarks>
private static class WrapperCodecs
{
private static readonly Dictionary<System.Type, object> Codecs = new Dictionary<System.Type, object>
{
{ typeof(bool), ForBool(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
{ typeof(int), ForInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
{ typeof(long), ForInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
{ typeof(uint), ForUInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
{ typeof(ulong), ForUInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
{ typeof(float), ForFloat(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed32)) },
{ typeof(double), ForDouble(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed64)) },
{ typeof(string), ForString(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) },
{ typeof(ByteString), ForBytes(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) }
};
private static readonly Dictionary<System.Type, object> Readers = new Dictionary<System.Type, object>
{
// TODO: Provide more optimized readers.
{ typeof(bool), (ValueReader<bool?>)ParsingPrimitivesWrappers.ReadBoolWrapper },
{ typeof(int), (ValueReader<int?>)ParsingPrimitivesWrappers.ReadInt32Wrapper },
{ typeof(long), (ValueReader<long?>)ParsingPrimitivesWrappers.ReadInt64Wrapper },
{ typeof(uint), (ValueReader<uint?>)ParsingPrimitivesWrappers.ReadUInt32Wrapper },
{ typeof(ulong), (ValueReader<ulong?>)ParsingPrimitivesWrappers.ReadUInt64Wrapper },
{ typeof(float), BitConverter.IsLittleEndian ?
(ValueReader<float?>)ParsingPrimitivesWrappers.ReadFloatWrapperLittleEndian :
(ValueReader<float?>)ParsingPrimitivesWrappers.ReadFloatWrapperSlow },
{ typeof(double), BitConverter.IsLittleEndian ?
(ValueReader<double?>)ParsingPrimitivesWrappers.ReadDoubleWrapperLittleEndian :
(ValueReader<double?>)ParsingPrimitivesWrappers.ReadDoubleWrapperSlow },
// `string` and `ByteString` are less performance-sensitive; no optimized readers are implemented for now.
{ typeof(string), null },
{ typeof(ByteString), null },
};
/// <summary>
/// Returns a field codec which effectively wraps a value of type T in a message.
///
/// </summary>
internal static FieldCodec<T> GetCodec<T>()
{
if (!Codecs.TryGetValue(typeof(T), out object value))
{
throw new InvalidOperationException("Invalid type argument requested for wrapper codec: " + typeof(T));
}
return (FieldCodec<T>) value;
}
internal static ValueReader<T?> GetReader<T>() where T : struct
{
if (!Readers.TryGetValue(typeof(T), out object value))
{
throw new InvalidOperationException("Invalid type argument requested for wrapper reader: " + typeof(T));
}
if (value == null)
{
// Return default unoptimized reader for the wrapper type.
var nestedCoded = GetCodec<T>();
return (ref ParseContext ctx) => Read<T>(ref ctx, nestedCoded);
}
// Return optimized read for the wrapper type.
return (ValueReader<T?>)value;
}
[SecuritySafeCritical]
internal static T Read<T>(ref ParseContext ctx, FieldCodec<T> codec)
{
int length = ctx.ReadLength();
int oldLimit = SegmentedBufferHelper.PushLimit(ref ctx.state, length);
uint tag;
T value = codec.DefaultValue;
while ((tag = ctx.ReadTag()) != 0)
{
if (tag == codec.Tag)
{
value = codec.Read(ref ctx);
}
else
{
ParsingPrimitivesMessages.SkipLastField(ref ctx.buffer, ref ctx.state);
}
}
ParsingPrimitivesMessages.CheckReadEndOfStreamTag(ref ctx.state);
SegmentedBufferHelper.PopLimit(ref ctx.state, oldLimit);
return value;
}
internal static void Write<T>(ref WriteContext ctx, T value, FieldCodec<T> codec)
{
ctx.WriteLength(codec.CalculateSizeWithTag(value));
codec.WriteTagAndValue(ref ctx, value);
}
internal static int CalculateSize<T>(T value, FieldCodec<T> codec)
{
int fieldLength = codec.CalculateSizeWithTag(value);
return CodedOutputStream.ComputeLengthSize(fieldLength) + fieldLength;
}
}
}
internal delegate TValue ValueReader<out TValue>(ref ParseContext ctx);
internal delegate void ValueWriter<T>(ref WriteContext ctx, T value);
/// <summary>
/// <para>
/// An encode/decode pair for a single field. This effectively encapsulates
/// all the information needed to read or write the field value from/to a coded
/// stream.
/// </para>
/// <para>
/// This class is public and has to be as it is used by generated code, but its public
/// API is very limited - just what the generated code needs to call directly.
/// </para>
/// </summary>
/// <remarks>
/// This never writes default values to the stream, and does not address "packedness"
/// in repeated fields itself, other than to know whether or not the field *should* be packed.
/// </remarks>
public sealed class FieldCodec<T>
{
private static readonly EqualityComparer<T> EqualityComparer = ProtobufEqualityComparers.GetEqualityComparer<T>();
private static readonly T DefaultDefault;
// Only non-nullable value types support packing. This is the simplest way of detecting that.
private static readonly bool TypeSupportsPacking = default(T) != null;
/// <summary>
/// Merges an input stream into a value
/// </summary>
internal delegate void InputMerger(ref ParseContext ctx, ref T value);
/// <summary>
/// Merges a value into a reference to another value, returning a boolean if the value was set
/// </summary>
internal delegate bool ValuesMerger(ref T value, T other);
static FieldCodec()
{
if (typeof(T) == typeof(string))
{
DefaultDefault = (T)(object)"";
}
else if (typeof(T) == typeof(ByteString))
{
DefaultDefault = (T)(object)ByteString.Empty;
}
// Otherwise it's the default value of the CLR type
}
internal static bool IsPackedRepeatedField(uint tag) =>
TypeSupportsPacking && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited;
internal bool PackedRepeatedField { get; }
/// <summary>
/// Returns a delegate to write a value (unconditionally) to a coded output stream.
/// </summary>
internal ValueWriter<T> ValueWriter { get; }
/// <summary>
/// Returns the size calculator for just a value.
/// </summary>
internal Func<T, int> ValueSizeCalculator { get; }
/// <summary>
/// Returns a delegate to read a value from a coded input stream. It is assumed that
/// the stream is already positioned on the appropriate tag.
/// </summary>
internal ValueReader<T> ValueReader { get; }
/// <summary>
/// Returns a delegate to merge a value from a coded input stream.
/// It is assumed that the stream is already positioned on the appropriate tag
/// </summary>
internal InputMerger ValueMerger { get; }
/// <summary>
/// Returns a delegate to merge two values together.
/// </summary>
internal ValuesMerger FieldMerger { get; }
/// <summary>
/// Returns the fixed size for an entry, or 0 if sizes vary.
/// </summary>
internal int FixedSize { get; }
/// <summary>
/// Gets the tag of the codec.
/// </summary>
/// <value>
/// The tag of the codec.
/// </value>
internal uint Tag { get; }
/// <summary>
/// Gets the end tag of the codec or 0 if there is no end tag
/// </summary>
/// <value>
/// The end tag of the codec.
/// </value>
internal uint EndTag { get; }
/// <summary>
/// Default value for this codec. Usually the same for every instance of the same type, but
/// for string/ByteString wrapper fields the codec's default value is null, whereas for
/// other string/ByteString fields it's "" or ByteString.Empty.
/// </summary>
/// <value>
/// The default value of the codec's type.
/// </value>
internal T DefaultValue { get; }
private readonly int tagSize;
internal FieldCodec(
ValueReader<T> reader,
ValueWriter<T> writer,
int fixedSize,
uint tag,
T defaultValue) : this(reader, writer, _ => fixedSize, tag, defaultValue)
{
FixedSize = fixedSize;
}
internal FieldCodec(
ValueReader<T> reader,
ValueWriter<T> writer,
Func<T, int> sizeCalculator,
uint tag,
T defaultValue) : this(reader, writer, (ref ParseContext ctx, ref T v) => v = reader(ref ctx), (ref T v, T v2) => { v = v2; return true; }, sizeCalculator, tag, 0, defaultValue)
{
}
internal FieldCodec(
ValueReader<T> reader,
ValueWriter<T> writer,
InputMerger inputMerger,
ValuesMerger valuesMerger,
Func<T, int> sizeCalculator,
uint tag,
uint endTag = 0) : this(reader, writer, inputMerger, valuesMerger, sizeCalculator, tag, endTag, DefaultDefault)
{
}
internal FieldCodec(
ValueReader<T> reader,
ValueWriter<T> writer,
InputMerger inputMerger,
ValuesMerger valuesMerger,
Func<T, int> sizeCalculator,
uint tag,
uint endTag,
T defaultValue)
{
ValueReader = reader;
ValueWriter = writer;
ValueMerger = inputMerger;
FieldMerger = valuesMerger;
ValueSizeCalculator = sizeCalculator;
FixedSize = 0;
Tag = tag;
EndTag = endTag;
DefaultValue = defaultValue;
tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
if (endTag != 0)
tagSize += CodedOutputStream.ComputeRawVarint32Size(endTag);
// Detect packed-ness once, so we can check for it within RepeatedField<T>.
PackedRepeatedField = IsPackedRepeatedField(tag);
}
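// Illustrative note (not part of the original source): ComputeRawVarint32Size(10) == 1,
// so an ordinary codec with tag value 10 has tagSize == 1; a group codec with start tag 11
// and end tag 12 has tagSize == 2, one byte for each delimiting tag.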
/// <summary>
/// Write a tag and the given value, *if* the value is not the default.
/// </summary>
public void WriteTagAndValue(CodedOutputStream output, T value)
{
WriteContext.Initialize(output, out WriteContext ctx);
try
{
WriteTagAndValue(ref ctx, value);
}
finally
{
ctx.CopyStateTo(output);
}
}
/// <summary>
/// Write a tag and the given value, *if* the value is not the default.
/// </summary>
public void WriteTagAndValue(ref WriteContext ctx, T value)
{
if (!IsDefault(value))
{
ctx.WriteTag(Tag);
ValueWriter(ref ctx, value);
if (EndTag != 0)
{
ctx.WriteTag(EndTag);
}
}
}
/// <summary>
/// Reads a value of the codec type from the given <see cref="CodedInputStream"/>.
/// </summary>
/// <param name="input">The input stream to read from.</param>
/// <returns>The value read from the stream.</returns>
public T Read(CodedInputStream input)
{
ParseContext.Initialize(input, out ParseContext ctx);
try
{
return ValueReader(ref ctx);
}
finally
{
ctx.CopyStateTo(input);
}
}
/// <summary>
/// Reads a value of the codec type from the given <see cref="ParseContext"/>.
/// </summary>
/// <param name="ctx">The parse context to read from.</param>
/// <returns>The value read.</returns>
public T Read(ref ParseContext ctx)
{
return ValueReader(ref ctx);
}
/// <summary>
/// Calculates the size required to write the given value, with a tag,
/// if the value is not the default.
/// </summary>
public int CalculateSizeWithTag(T value) => IsDefault(value) ? 0 : ValueSizeCalculator(value) + tagSize;
/// <summary>
/// Calculates the size required to write the given value, with a tag, even
/// if the value is the default.
/// </summary>
internal int CalculateUnconditionalSizeWithTag(T value) => ValueSizeCalculator(value) + tagSize;
private bool IsDefault(T value) => EqualityComparer.Equals(value, DefaultValue);
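// Illustrative sketch (not part of the original source): because default values are never
// written, an int32 codec for field 1 (tag value 8, varint wire type) behaves like this:
//
//     var codec = FieldCodec.ForInt32(8);
//     codec.CalculateSizeWithTag(0);     // == 0: the default value is omitted entirely
//     codec.CalculateSizeWithTag(300);   // == 3: 1 tag byte + 2 varint bytes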
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5e59b50223fe9154189492d1a8dc617d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,357 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;
namespace Google.Protobuf
{
/// <summary>
/// <para>A tree representation of a FieldMask. Each leaf node in this tree represent
/// a field path in the FieldMask.</para>
///
/// <para>For example, FieldMask "foo.bar,foo.baz,bar.baz" as a tree will be:</para>
/// <code>
/// [root] -+- foo -+- bar
/// | |
/// | +- baz
/// |
/// +- bar --- baz
/// </code>
///
/// <para>By representing FieldMasks with this tree structure we can easily convert
/// a FieldMask to a canonical form, merge two FieldMasks, calculate the
/// intersection of two FieldMasks, and traverse all fields specified by the
/// FieldMask in a message tree.</para>
/// </summary>
internal sealed class FieldMaskTree
{
private const char FIELD_PATH_SEPARATOR = '.';
internal sealed class Node
{
public Dictionary<string, Node> Children { get; } = new Dictionary<string, Node>();
}
private readonly Node root = new Node();
/// <summary>
/// Creates an empty FieldMaskTree.
/// </summary>
public FieldMaskTree()
{
}
/// <summary>
/// Creates a FieldMaskTree for a given FieldMask.
/// </summary>
public FieldMaskTree(FieldMask mask)
{
MergeFromFieldMask(mask);
}
public override string ToString()
{
return ToFieldMask().ToString();
}
/// <summary>
/// Adds a field path to the tree. In a FieldMask, every field path matches the
/// specified field as well as all its sub-fields. For example, a field path
/// "foo.bar" matches field "foo.bar" and also "foo.bar.baz", etc. When adding
/// a field path to the tree, redundant sub-paths will be removed. That is,
/// after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
/// exists, which will turn the tree node for "foo.bar" into a leaf node.
/// Likewise, if the field path to add is a sub-path of an existing leaf node,
/// nothing will be changed in the tree.
/// </summary>
public FieldMaskTree AddFieldPath(string path)
{
var parts = path.Split(FIELD_PATH_SEPARATOR);
if (parts.Length == 0)
{
return this;
}
var node = root;
var createNewBranch = false;
// Find the matching node in the tree.
foreach (var part in parts)
{
// Check whether the path matches an existing leaf node.
if (!createNewBranch
&& node != root
&& node.Children.Count == 0)
{
// The path to add is a sub-path of an existing leaf node.
return this;
}
if (!node.Children.TryGetValue(part, out Node childNode))
{
createNewBranch = true;
childNode = new Node();
node.Children.Add(part, childNode);
}
node = childNode;
}
// Turn the matching node into a leaf node (i.e., remove sub-paths).
node.Children.Clear();
return this;
}
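// Illustrative sketch (not part of the original source) of the collapsing behaviour
// described above:
//
//     var tree = new FieldMaskTree();
//     tree.AddFieldPath("foo.bar.baz");
//     tree.AddFieldPath("foo.bar");      // removes the redundant "baz" leaf
//     tree.AddFieldPath("foo.bar.qux");  // no-op: already covered by the "foo.bar" leaf
//     // tree.ToFieldMask() now contains the single path "foo.bar".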
/// <summary>
/// Merges all field paths in a FieldMask into this tree.
/// </summary>
public FieldMaskTree MergeFromFieldMask(FieldMask mask)
{
foreach (var path in mask.Paths)
{
AddFieldPath(path);
}
return this;
}
/// <summary>
/// Converts this tree to a FieldMask.
/// </summary>
public FieldMask ToFieldMask()
{
var mask = new FieldMask();
if (root.Children.Count != 0)
{
var paths = new List<string>();
GetFieldPaths(root, "", paths);
mask.Paths.AddRange(paths);
}
return mask;
}
/// <summary>
/// Gathers all field paths in a sub-tree.
/// </summary>
private void GetFieldPaths(Node node, string path, List<string> paths)
{
if (node.Children.Count == 0)
{
paths.Add(path);
return;
}
foreach (var entry in node.Children)
{
var childPath = path.Length == 0 ? entry.Key : path + "." + entry.Key;
GetFieldPaths(entry.Value, childPath, paths);
}
}
/// <summary>
/// Adds the intersection of this tree with the given <paramref name="path"/> to <paramref name="output"/>.
/// </summary>
public void IntersectFieldPath(string path, FieldMaskTree output)
{
if (root.Children.Count == 0)
{
return;
}
var parts = path.Split(FIELD_PATH_SEPARATOR);
if (parts.Length == 0)
{
return;
}
var node = root;
foreach (var part in parts)
{
if (node != root
&& node.Children.Count == 0)
{
// The given path is a sub-path of an existing leaf node in the tree.
output.AddFieldPath(path);
return;
}
if (!node.Children.TryGetValue(part, out node))
{
return;
}
}
// We found a matching node for the path. All leaf children of this matching
// node are in the intersection.
var paths = new List<string>();
GetFieldPaths(node, path, paths);
foreach (var value in paths)
{
output.AddFieldPath(value);
}
}
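// Illustrative sketch (not part of the original source): with a tree containing only
// "foo.bar", intersecting with "foo" yields the leaves under the matching node, while a
// sub-path of an existing leaf is added as-is:
//
//     var a = new FieldMaskTree();
//     tree.IntersectFieldPath("foo", a);          // a now covers "foo.bar"
//     var b = new FieldMaskTree();
//     tree.IntersectFieldPath("foo.bar.baz", b);  // b now covers "foo.bar.baz"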
/// <summary>
/// Merges all fields specified by this FieldMaskTree from <paramref name="source"/> to <paramref name="destination"/>.
/// </summary>
public void Merge(IMessage source, IMessage destination, FieldMask.MergeOptions options)
{
if (source.Descriptor != destination.Descriptor)
{
throw new InvalidProtocolBufferException("Cannot merge messages of different types.");
}
if (root.Children.Count == 0)
{
return;
}
Merge(root, "", source, destination, options);
}
/// <summary>
/// Merges all fields specified by a sub-tree from <paramref name="source"/> to <paramref name="destination"/>.
/// </summary>
private void Merge(
Node node,
string path,
IMessage source,
IMessage destination,
FieldMask.MergeOptions options)
{
if (source.Descriptor != destination.Descriptor)
{
throw new InvalidProtocolBufferException($"source ({source.Descriptor}) and destination ({destination.Descriptor}) descriptor must be equal");
}
var descriptor = source.Descriptor;
foreach (var entry in node.Children)
{
var field = descriptor.FindFieldByName(entry.Key);
if (field == null)
{
Debug.WriteLine($"Cannot find field \"{entry.Key}\" in message type \"{descriptor.FullName}\"");
continue;
}
if (entry.Value.Children.Count != 0)
{
if (field.IsRepeated
|| field.FieldType != FieldType.Message)
{
Debug.WriteLine($"Field \"{field.FullName}\" is not a singular message field and cannot have sub-fields.");
continue;
}
var sourceField = field.Accessor.GetValue(source);
var destinationField = field.Accessor.GetValue(destination);
if (sourceField == null
&& destinationField == null)
{
// If the message field is not present in both source and destination, skip recursing
// so we don't create unnecessary empty messages.
continue;
}
if (destinationField == null)
{
// If we have to merge but the destination does not contain the field, create it.
destinationField = field.MessageType.Parser.CreateTemplate();
field.Accessor.SetValue(destination, destinationField);
}
if (sourceField == null)
{
// If the message field is not present in the source but is in the destination, create an empty one
// so we can properly handle child entries
sourceField = field.MessageType.Parser.CreateTemplate();
}
var childPath = path.Length == 0 ? entry.Key : path + "." + entry.Key;
Merge(entry.Value, childPath, (IMessage)sourceField, (IMessage)destinationField, options);
continue;
}
if (field.IsRepeated)
{
if (options.ReplaceRepeatedFields)
{
field.Accessor.Clear(destination);
}
var sourceField = (IList)field.Accessor.GetValue(source);
var destinationField = (IList)field.Accessor.GetValue(destination);
foreach (var element in sourceField)
{
destinationField.Add(element);
}
}
else
{
var sourceField = field.Accessor.GetValue(source);
if (field.FieldType == FieldType.Message)
{
if (options.ReplaceMessageFields)
{
if (sourceField == null)
{
field.Accessor.Clear(destination);
}
else
{
field.Accessor.SetValue(destination, sourceField);
}
}
else
{
if (sourceField != null)
{
// Well-known wrapper types are represented as nullable primitive types, so we do not "merge" them.
// Instead, any non-null value just overwrites the previous value directly.
if (field.MessageType.IsWrapperType)
{
field.Accessor.SetValue(destination, sourceField);
}
else
{
var sourceByteString = ((IMessage)sourceField).ToByteString();
var destinationValue = (IMessage)field.Accessor.GetValue(destination);
if (destinationValue != null)
{
destinationValue.MergeFrom(sourceByteString);
}
else
{
field.Accessor.SetValue(destination, field.MessageType.Parser.ParseFrom(sourceByteString));
}
}
}
}
}
else
{
if (sourceField != null
|| !options.ReplacePrimitiveFields)
{
field.Accessor.SetValue(destination, sourceField);
}
else
{
field.Accessor.Clear(destination);
}
}
}
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 77987c35c51cf3c4eb52e6650ddd8ce6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,26 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Text.RegularExpressions;
namespace Google.Protobuf
{
/// <summary>
/// Class containing helpful workarounds for various platform compatibility issues.
/// </summary>
internal static class FrameworkPortability
{
// The value of RegexOptions.Compiled is 8. We can test for the presence at
// execution time using Enum.IsDefined, so a single build will do the right thing
// on each platform. (RegexOptions.Compiled isn't supported by PCLs.)
internal static readonly RegexOptions CompiledRegexWhereAvailable =
Enum.IsDefined(typeof(RegexOptions), 8) ? (RegexOptions)8 : RegexOptions.None;
}
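// Illustrative usage (not part of the original source): callers inside this library pass
// the field where they would otherwise pass RegexOptions.Compiled directly, e.g.
//
//     var regex = new Regex("^[0-9]+$", FrameworkPortability.CompiledRegexWhereAvailable);
//
// so a single build degrades gracefully on platforms without compiled regex support.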
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 92539e5d5f80fb04d849ee37ffc76fb6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,14 @@
{
"name": "Google.Protobuf",
"rootNamespace": "",
"references": [],
"includePlatforms": [],
"excludePlatforms": [],
"allowUnsafeCode": true,
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": [],
"versionDefines": [],
"noEngineReferences": false
}

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 3ea2eebe4fbf8bc4dbf2d9a3565807ae
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
namespace Google.Protobuf
{
/// <summary>
/// Interface for a Protocol Buffers message, supporting
/// parsing from <see cref="ParseContext"/> and writing to <see cref="WriteContext"/>.
/// </summary>
public interface IBufferMessage : IMessage
{
/// <summary>
/// Internal implementation of merging data from given parse context into this message.
/// Users should never invoke this method directly.
/// </summary>
void InternalMergeFrom(ref ParseContext ctx);
/// <summary>
/// Internal implementation of writing this message to a given write context.
/// Users should never invoke this method directly.
/// </summary>
void InternalWriteTo(ref WriteContext ctx);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8504bba83a1fddd4883241c00340ef52
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,46 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2016 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
namespace Google.Protobuf
{
/// <summary>
/// A message type that has a custom string format for diagnostic purposes.
/// </summary>
/// <remarks>
/// <para>
/// Calling <see cref="object.ToString"/> on a generated message type normally
/// returns the JSON representation. If a message type implements this interface,
/// then the <see cref="ToDiagnosticString"/> method will be called instead of the regular
/// JSON formatting code, but only when <c>ToString()</c> is called either on the message itself
/// or on another message which contains it. This does not affect the normal JSON formatting of
/// the message.
/// </para>
/// <para>
/// For example, if you create a proto message representing a GUID, the internal
/// representation may be a <c>bytes</c> field or four <c>fixed32</c> fields. However, when debugging
/// it may be more convenient to see a result in the same format as <see cref="System.Guid"/> provides.
/// </para>
/// <para>This interface extends <see cref="IMessage"/> to avoid it accidentally being implemented
/// on types other than messages, where it would not be used by anything in the framework.</para>
/// </remarks>
public interface ICustomDiagnosticMessage : IMessage
{
/// <summary>
/// Returns a string representation of this object, for diagnostic purposes.
/// </summary>
/// <remarks>
/// This method is called when a message is formatted as part of a <see cref="object.ToString"/>
/// call. It does not affect the JSON representation used by <see cref="JsonFormatter"/> other than
/// in calls to <see cref="JsonFormatter.ToDiagnosticString(IMessage)"/>. While it is recommended
/// that the result is valid JSON, this is never assumed by the Protobuf library.
/// </remarks>
/// <returns>A string representation of this object, for diagnostic purposes.</returns>
string ToDiagnosticString();
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: eb9680f7419ab7b48a823cb24c879897
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,31 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
namespace Google.Protobuf
{
/// <summary>
/// Generic interface for a deeply cloneable type.
/// </summary>
/// <remarks>
/// <para>
/// All generated messages implement this interface, but so do some non-message types.
/// Additionally, due to the type constraint on <c>T</c> in <see cref="IMessage{T}"/>,
/// it is simpler to keep this as a separate interface.
/// </para>
/// </remarks>
/// <typeparam name="T">The type itself, returned by the <see cref="Clone"/> method.</typeparam>
public interface IDeepCloneable<T>
{
/// <summary>
/// Creates a deep clone of this object.
/// </summary>
/// <returns>A deep clone of this object.</returns>
T Clone();
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 865e37e1dfa3cbf41ada3f549537efc8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,56 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Collections;
namespace Google.Protobuf
{
/// <summary>
/// Generic interface for a Protocol Buffers message containing one or more extensions, where the type parameter is expected to be the same type as the implementation class.
/// This interface is experimental and is subject to change.
/// </summary>
public interface IExtendableMessage<T> : IMessage<T> where T : IExtendableMessage<T>
{
/// <summary>
/// Gets the value of the specified extension
/// </summary>
TValue GetExtension<TValue>(Extension<T, TValue> extension);
/// <summary>
/// Gets the value of the specified repeated extension or null if the extension isn't registered in this set.
/// For a version of this method that never returns null, use <see cref="IExtendableMessage{T}.GetOrInitializeExtension{TValue}(RepeatedExtension{T, TValue})"/>
/// </summary>
RepeatedField<TValue> GetExtension<TValue>(RepeatedExtension<T, TValue> extension);
/// <summary>
/// Gets the value of the specified repeated extension, registering it if it hasn't already been registered.
/// </summary>
RepeatedField<TValue> GetOrInitializeExtension<TValue>(RepeatedExtension<T, TValue> extension);
/// <summary>
/// Sets the value of the specified extension
/// </summary>
void SetExtension<TValue>(Extension<T, TValue> extension, TValue value);
/// <summary>
/// Gets whether the value of the specified extension is set
/// </summary>
bool HasExtension<TValue>(Extension<T, TValue> extension);
/// <summary>
/// Clears the value of the specified extension
/// </summary>
void ClearExtension<TValue>(Extension<T, TValue> extension);
/// <summary>
/// Clears the value of the specified repeated extension
/// </summary>
void ClearExtension<TValue>(RepeatedExtension<T, TValue> extension);
}
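// Illustrative sketch (not part of the original source), where MyMessage implements
// IExtendableMessage<MyMessage> and MyExtensions.Flag is a hypothetical generated
// Extension<MyMessage, bool>:
//
//     message.SetExtension(MyExtensions.Flag, true);
//     bool present = message.HasExtension(MyExtensions.Flag);   // true
//     bool flag = message.GetExtension(MyExtensions.Flag);      // true
//     message.ClearExtension(MyExtensions.Flag);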
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8d04c76e497015c4f944e9edd078fc42
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,64 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using Google.Protobuf.Reflection;
namespace Google.Protobuf
{
/// <summary>
/// Interface for a Protocol Buffers message, supporting
/// basic operations required for serialization.
/// </summary>
public interface IMessage
{
/// <summary>
/// Merges the data from the specified coded input stream with the current message.
/// </summary>
/// <remarks>See the user guide for precise merge semantics.</remarks>
/// <param name="input"></param>
void MergeFrom(CodedInputStream input);
/// <summary>
/// Writes the data to the given coded output stream.
/// </summary>
/// <param name="output">Coded output stream to write the data to. Must not be null.</param>
void WriteTo(CodedOutputStream output);
/// <summary>
/// Calculates the size of this message in Protocol Buffer wire format, in bytes.
/// </summary>
/// <returns>The number of bytes required to write this message
/// to a coded output stream.</returns>
int CalculateSize();
/// <summary>
/// Descriptor for this message. All instances are expected to return the same descriptor,
/// and for generated types this will be an explicitly-implemented member, returning the
/// same value as the static property declared on the type.
/// </summary>
MessageDescriptor Descriptor { get; }
}
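// Illustrative sketch (not part of the original source): a round trip through these
// members, where MyMessage is a hypothetical generated type implementing IMessage<MyMessage>:
//
//     byte[] bytes;
//     using (var stream = new MemoryStream())
//     {
//         var output = new CodedOutputStream(stream);
//         message.WriteTo(output);          // writes CalculateSize() bytes
//         output.Flush();
//         bytes = stream.ToArray();
//     }
//     var copy = new MyMessage();
//     copy.MergeFrom(new CodedInputStream(bytes));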
/// <summary>
/// Generic interface for a Protocol Buffers message,
/// where the type parameter is expected to be the same type as
/// the implementation class.
/// </summary>
/// <typeparam name="T">The message type.</typeparam>
public interface IMessage<T> : IMessage, IEquatable<T>, IDeepCloneable<T> where T : IMessage<T>
{
/// <summary>
/// Merges the given message into this one.
/// </summary>
/// <remarks>See the user guide for precise merge semantics.</remarks>
/// <param name="message">The message to merge with this one. Must not be null.</param>
void MergeFrom(T message);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7f8804e406a70a24b913d2c64cc0a2d5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System.IO;
namespace Google.Protobuf
{
/// <summary>
/// Thrown when an attempt is made to parse invalid JSON, e.g. using
/// a non-string property key, or including a redundant comma. Parsing a protocol buffer
/// message represented in JSON using <see cref="JsonParser"/> can throw both this
/// exception and <see cref="InvalidProtocolBufferException"/> depending on the situation. This
/// exception is only thrown for "pure JSON" errors, whereas <c>InvalidProtocolBufferException</c>
/// is thrown when the JSON may be valid in and of itself, but cannot be parsed as a protocol buffer
/// message.
/// </summary>
public sealed class InvalidJsonException : IOException
{
internal InvalidJsonException(string message)
: base(message)
{
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c1974c1a986320e458e5cc80b32f1b2a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,117 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.IO;
namespace Google.Protobuf
{
/// <summary>
/// Thrown when a protocol message being parsed is invalid in some way,
/// e.g. it contains a malformed varint or a negative byte length.
/// </summary>
public sealed class InvalidProtocolBufferException : IOException
{
internal InvalidProtocolBufferException(string message)
: base(message)
{
}
internal InvalidProtocolBufferException(string message, Exception innerException)
: base(message, innerException)
{
}
internal static InvalidProtocolBufferException MoreDataAvailable()
{
return new InvalidProtocolBufferException(
"Completed reading a message while more data was available in the stream.");
}
internal static InvalidProtocolBufferException TruncatedMessage()
{
return new InvalidProtocolBufferException(
"While parsing a protocol message, the input ended unexpectedly " +
"in the middle of a field. This could mean either that the " +
"input has been truncated or that an embedded message " +
"misreported its own length.");
}
internal static InvalidProtocolBufferException NegativeSize()
{
return new InvalidProtocolBufferException(
"CodedInputStream encountered an embedded string or message " +
"which claimed to have negative size.");
}
internal static InvalidProtocolBufferException MalformedVarint()
{
return new InvalidProtocolBufferException(
"CodedInputStream encountered a malformed varint.");
}
/// <summary>
/// Creates an exception for an error condition of an invalid tag being encountered.
/// </summary>
internal static InvalidProtocolBufferException InvalidTag()
{
return new InvalidProtocolBufferException(
"Protocol message contained an invalid tag (zero).");
}
internal static InvalidProtocolBufferException InvalidWireType()
{
return new InvalidProtocolBufferException(
"Protocol message contained a tag with an invalid wire type.");
}
internal static InvalidProtocolBufferException InvalidBase64(Exception innerException)
{
return new InvalidProtocolBufferException("Invalid base64 data", innerException);
}
internal static InvalidProtocolBufferException InvalidEndTag()
{
return new InvalidProtocolBufferException(
"Protocol message end-group tag did not match expected tag.");
}
internal static InvalidProtocolBufferException RecursionLimitExceeded()
{
return new InvalidProtocolBufferException(
"Protocol message had too many levels of nesting. May be malicious. " +
"Use CodedInputStream.SetRecursionLimit() to increase the depth limit.");
}
internal static InvalidProtocolBufferException JsonRecursionLimitExceeded()
{
return new InvalidProtocolBufferException(
"Protocol message had too many levels of nesting. May be malicious. " +
"Use JsonParser.Settings to increase the depth limit.");
}
internal static InvalidProtocolBufferException SizeLimitExceeded()
{
return new InvalidProtocolBufferException(
"Protocol message was too large. May be malicious. " +
"Use CodedInputStream.SetSizeLimit() to increase the size limit.");
}
internal static InvalidProtocolBufferException InvalidMessageStreamTag()
{
return new InvalidProtocolBufferException(
"Stream of protocol messages had invalid tag. Expected tag is length-delimited field 1.");
}
internal static InvalidProtocolBufferException MissingFields()
{
return new InvalidProtocolBufferException("Message was missing required fields");
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a160e176e560e834bac29808462085d1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,917 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;
namespace Google.Protobuf {
/// <summary>
/// Reflection-based converter from messages to JSON.
/// </summary>
/// <remarks>
/// <para>
/// Instances of this class are thread-safe, with no mutable state.
/// </para>
/// <para>
/// This is a simple start to get JSON formatting working. As it's reflection-based,
/// it's not as quick as baking calls into generated messages - but is a simpler implementation.
/// (This code is generally not heavily optimized.)
/// </para>
/// </remarks>
public sealed class JsonFormatter {
internal const string AnyTypeUrlField = "@type";
internal const string AnyDiagnosticValueField = "@value";
internal const string AnyWellKnownTypeValueField = "value";
private const string NameValueSeparator = ": ";
private const string ValueSeparator = ", ";
private const string MultilineValueSeparator = ",";
private const char ObjectOpenBracket = '{';
private const char ObjectCloseBracket = '}';
private const char ListBracketOpen = '[';
private const char ListBracketClose = ']';
/// <summary>
/// Returns a formatter using the default settings.
/// </summary>
public static JsonFormatter Default { get; } = new JsonFormatter(Settings.Default);
// A JSON formatter which *only* exists to produce the diagnostic output used by ToDiagnosticString.
private static readonly JsonFormatter diagnosticFormatter = new JsonFormatter(Settings.Default);
/// <summary>
/// The JSON representation of the first 160 characters of Unicode.
/// Empty strings are replaced by the static constructor.
/// </summary>
private static readonly string[] CommonRepresentations = {
// C0 (ASCII and derivatives) control characters
"\\u0000", "\\u0001", "\\u0002", "\\u0003", // 0x00
"\\u0004", "\\u0005", "\\u0006", "\\u0007", "\\b", "\\t", "\\n", "\\u000b", "\\f", "\\r",
"\\u000e", "\\u000f", "\\u0010", "\\u0011", "\\u0012", "\\u0013", // 0x10
"\\u0014", "\\u0015", "\\u0016", "\\u0017", "\\u0018", "\\u0019", "\\u001a", "\\u001b",
"\\u001c", "\\u001d", "\\u001e", "\\u001f",
// Escaping of " and \ are required by www.json.org string definition.
// Escaping of < and > are required for HTML security.
"", "", "\\\"", "", "", "", "", "", // 0x20
"", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", // 0x30
"", "", "", "", "\\u003c", "", "\\u003e", "", "", "", "", "", "", "", "", "", // 0x40
"", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", // 0x50
"", "", "", "", "\\\\", "", "", "", "", "", "", "", "", "", "", "", // 0x60
"", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", // 0x70
"", "", "", "", "", "", "", "\\u007f",
// C1 (ISO 8859 and Unicode) extended control characters
"\\u0080", "\\u0081", "\\u0082", "\\u0083", // 0x80
"\\u0084", "\\u0085", "\\u0086", "\\u0087", "\\u0088", "\\u0089", "\\u008a", "\\u008b",
"\\u008c", "\\u008d", "\\u008e", "\\u008f", "\\u0090", "\\u0091", "\\u0092",
"\\u0093", // 0x90
"\\u0094", "\\u0095", "\\u0096", "\\u0097", "\\u0098", "\\u0099", "\\u009a", "\\u009b",
"\\u009c", "\\u009d", "\\u009e", "\\u009f"
};
static JsonFormatter() {
for (int i = 0; i < CommonRepresentations.Length; i++) {
if (CommonRepresentations[i] == "") {
CommonRepresentations[i] = ((char)i).ToString();
}
}
}
private readonly Settings settings;
private bool DiagnosticOnly => ReferenceEquals(this, diagnosticFormatter);
/// <summary>
/// Creates a new formatter with the given settings.
/// </summary>
/// <param name="settings">The settings.</param>
public JsonFormatter(Settings settings) {
this.settings = ProtoPreconditions.CheckNotNull(settings, nameof(settings));
}
/// <summary>
/// Formats the specified message as JSON.
/// </summary>
/// <param name="message">The message to format.</param>
/// <remarks>This method delegates to <c>Format(IMessage, int)</c> with <c>indentationLevel =
/// 0</c>.</remarks> <returns>The formatted message.</returns>
public string Format(IMessage message) => Format(message, indentationLevel: 0);
/// <summary>
/// Formats the specified message as JSON.
/// </summary>
/// <param name="message">The message to format.</param>
/// <param name="indentationLevel">Indentation level to start at.</param>
/// <remarks>To keep consistent indentation when embedding a message inside another JSON string,
/// set <paramref name="indentationLevel"/>. E.g: <code> var response = $@"{{
/// ""data"": { Format(message, indentationLevel: 1) }
/// }}"</code>
/// </remarks>
/// <returns>The formatted message.</returns>
public string Format(IMessage message, int indentationLevel) {
var writer = new StringWriter();
Format(message, writer, indentationLevel);
return writer.ToString();
}
/// <summary>
/// Formats the specified message as JSON.
/// </summary>
/// <param name="message">The message to format.</param>
/// <param name="writer">The TextWriter to write the formatted message to.</param>
/// <remarks>This method delegates to <c>Format(IMessage, TextWriter, int)</c> with
/// <c>indentationLevel = 0</c>.</remarks> <returns>The formatted message.</returns>
public void Format(IMessage message, TextWriter writer) => Format(message, writer,
indentationLevel: 0);
/// <summary>
/// Formats the specified message as JSON. When <see cref="Settings.Indentation"/> is not null,
/// start indenting at the specified <paramref name="indentationLevel"/>.
/// </summary>
/// <param name="message">The message to format.</param>
/// <param name="writer">The TextWriter to write the formatted message to.</param>
/// <param name="indentationLevel">Indentation level to start at.</param>
/// <remarks>To keep consistent indentation when embedding a message inside another JSON string,
/// set <paramref name="indentationLevel"/>.</remarks>
public void Format(IMessage message, TextWriter writer, int indentationLevel) {
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(writer, nameof(writer));
if (message.Descriptor.IsWellKnownType) {
WriteWellKnownTypeValue(writer, message.Descriptor, message, indentationLevel);
} else {
WriteMessage(writer, message, indentationLevel);
}
}
/// <summary>
/// Converts a message to JSON for diagnostic purposes with no extra context.
/// </summary>
/// <remarks>
/// <para>
/// This differs from calling <see cref="Format(IMessage)"/> on the default JSON
/// formatter in its handling of <see cref="Any"/>. As no type registry is available
/// in <see cref="object.ToString"/> calls, the normal way of resolving the type of
/// an <c>Any</c> message cannot be applied. Instead, a JSON property named <c>@value</c>
/// is included with the base64 data from the <see cref="Any.Value"/> property of the message.
/// </para>
/// <para>The value returned by this method is only designed to be used for diagnostic
/// purposes. It may not be parsable by <see cref="JsonParser"/>, and may not be parsable
/// by other Protocol Buffer implementations.</para>
/// </remarks>
/// <param name="message">The message to format for diagnostic purposes.</param>
/// <returns>The diagnostic-only JSON representation of the message</returns>
public static string ToDiagnosticString(IMessage message) {
ProtoPreconditions.CheckNotNull(message, nameof(message));
return diagnosticFormatter.Format(message);
}
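// Illustrative usage (not part of the original source), for any generated message
// instance `message`:
//
//     string json = JsonFormatter.Default.Format(message);
//     string diagnostic = JsonFormatter.ToDiagnosticString(message);  // Any fields use "@value"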
private void WriteMessage(TextWriter writer, IMessage message, int indentationLevel) {
if (message == null) {
WriteNull(writer);
return;
}
if (DiagnosticOnly) {
if (message is ICustomDiagnosticMessage customDiagnosticMessage) {
writer.Write(customDiagnosticMessage.ToDiagnosticString());
return;
}
}
WriteBracketOpen(writer, ObjectOpenBracket);
bool writtenFields = WriteMessageFields(writer, message, false, indentationLevel + 1);
WriteBracketClose(writer, ObjectCloseBracket, writtenFields, indentationLevel);
}
private bool WriteMessageFields(TextWriter writer, IMessage message,
bool assumeFirstFieldWritten, int indentationLevel) {
var fields = message.Descriptor.Fields;
bool first = !assumeFirstFieldWritten;
// First non-oneof fields
foreach (var field in fields.InFieldNumberOrder()) {
var accessor = field.Accessor;
var value = accessor.GetValue(message);
if (!ShouldFormatFieldValue(message, field, value)) {
continue;
}
MaybeWriteValueSeparator(writer, first);
MaybeWriteValueWhitespace(writer, indentationLevel);
if (settings.PreserveProtoFieldNames) {
WriteString(writer, accessor.Descriptor.Name);
} else {
WriteString(writer, accessor.Descriptor.JsonName);
}
writer.Write(NameValueSeparator);
WriteValue(writer, value, indentationLevel);
first = false;
}
return !first;
}
private void MaybeWriteValueSeparator(TextWriter writer, bool first) {
if (first) {
return;
}
writer.Write(settings.Indentation == null ? ValueSeparator : MultilineValueSeparator);
}
/// <summary>
/// Determines whether or not a field value should be serialized according to the field,
/// its value in the message, and the settings of this formatter.
/// </summary>
private bool ShouldFormatFieldValue(IMessage message, FieldDescriptor field, object value) =>
field.HasPresence
// Fields that support presence *just* use that
? field.Accessor.HasValue(message)
// Otherwise, format if either we've been asked to format default values, or if it's
// not a default value anyway.
: settings.FormatDefaultValues || !IsDefaultValue(field, value);
// Converted from java/core/src/main/java/com/google/protobuf/Descriptors.java
internal static string ToJsonName(string name) {
StringBuilder result = new StringBuilder(name.Length);
bool isNextUpperCase = false;
foreach (char ch in name) {
if (ch == '_') {
isNextUpperCase = true;
} else if (isNextUpperCase) {
result.Append(char.ToUpperInvariant(ch));
isNextUpperCase = false;
} else {
result.Append(ch);
}
}
return result.ToString();
}
internal static string FromJsonName(string name) {
StringBuilder result = new StringBuilder(name.Length);
foreach (char ch in name) {
if (char.IsUpper(ch)) {
result.Append('_');
result.Append(char.ToLowerInvariant(ch));
} else {
result.Append(ch);
}
}
return result.ToString();
}
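// Illustrative examples (not part of the original source):
//   ToJsonName("foo_bar_baz")   => "fooBarBaz"
//   FromJsonName("fooBarBaz")   => "foo_bar_baz"
// The two are not exact inverses for names that already contain upper-case characters,
// e.g. FromJsonName("FooBar") => "_foo_bar".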
private static void WriteNull(TextWriter writer) {
writer.Write("null");
}
private static bool IsDefaultValue(FieldDescriptor descriptor, object value) {
if (descriptor.IsMap) {
IDictionary dictionary = (IDictionary)value;
return dictionary.Count == 0;
}
if (descriptor.IsRepeated) {
IList list = (IList)value;
return list.Count == 0;
}
return descriptor.FieldType switch {
FieldType.Bool => (bool)value == false,
FieldType.Bytes => (ByteString)value == ByteString.Empty,
FieldType.String => (string)value == "",
FieldType.Double => (double)value == 0.0,
FieldType.SInt32 or FieldType.Int32 or FieldType.SFixed32 or FieldType.Enum =>
(int)value == 0,
FieldType.Fixed32 or FieldType.UInt32 => (uint)value == 0,
FieldType.Fixed64 or FieldType.UInt64 => (ulong)value == 0,
FieldType.SFixed64 or FieldType.Int64 or FieldType.SInt64 => (long)value == 0,
FieldType.Float => (float)value == 0f,
FieldType.Message or FieldType.Group => value == null,
_ => throw new ArgumentException("Invalid field type"),
};
}
/// <summary>
/// Writes a single value to the given writer as JSON. Only types understood by
/// Protocol Buffers can be written in this way. This method is only exposed for
/// advanced use cases; most users should be using <see cref="Format(IMessage)"/>
/// or <see cref="Format(IMessage, TextWriter)"/>.
/// </summary>
/// <param name="writer">The writer to write the value to. Must not be null.</param>
/// <param name="value">The value to write. May be null.</param>
/// <remarks>Delegates to <c>WriteValue(TextWriter, object, int)</c> with <c>indentationLevel =
/// 0</c>.</remarks>
public void WriteValue(TextWriter writer, object value) => WriteValue(writer, value, 0);
/// <summary>
/// Writes a single value to the given writer as JSON. Only types understood by
/// Protocol Buffers can be written in this way. This method is only exposed for
/// advanced use cases; most users should be using <see cref="Format(IMessage)"/>
/// or <see cref="Format(IMessage, TextWriter)"/>.
/// </summary>
/// <param name="writer">The writer to write the value to. Must not be null.</param>
/// <param name="value">The value to write. May be null.</param>
/// <param name="indentationLevel">The current indentationLevel. Not used when <see
/// cref="Settings.Indentation"/> is null.</param>
public void WriteValue(TextWriter writer, object value, int indentationLevel) {
if (value == null || value is NullValue) {
WriteNull(writer);
} else if (value is bool b) {
writer.Write(b ? "true" : "false");
} else if (value is ByteString byteString) {
// Nothing in Base64 needs escaping
writer.Write('"');
writer.Write(byteString.ToBase64());
writer.Write('"');
} else if (value is string str) {
WriteString(writer, str);
} else if (value is IDictionary dictionary) {
WriteDictionary(writer, dictionary, indentationLevel);
} else if (value is IList list) {
WriteList(writer, list, indentationLevel);
} else if (value is int || value is uint) {
IFormattable formattable = (IFormattable)value;
writer.Write(formattable.ToString("d", CultureInfo.InvariantCulture));
} else if (value is long || value is ulong) {
writer.Write('"');
IFormattable formattable = (IFormattable)value;
writer.Write(formattable.ToString("d", CultureInfo.InvariantCulture));
writer.Write('"');
} else if (value is System.Enum) {
if (settings.FormatEnumsAsIntegers) {
WriteValue(writer, (int)value);
} else {
string name = OriginalEnumValueHelper.GetOriginalName(value);
if (name != null) {
WriteString(writer, name);
} else {
WriteValue(writer, (int)value);
}
}
} else if (value is float || value is double) {
string text = ((IFormattable)value).ToString("r", CultureInfo.InvariantCulture);
if (text == "NaN" || text == "Infinity" || text == "-Infinity") {
writer.Write('"');
writer.Write(text);
writer.Write('"');
} else {
writer.Write(text);
}
} else if (value is IMessage message) {
Format(message, writer, indentationLevel);
} else {
throw new ArgumentException("Unable to format value of type " + value.GetType());
}
}
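// Illustrative mappings produced by WriteValue (not part of the original source):
//   42 (int)                       -> 42
//   42L (long)                     -> "42"     (64-bit integers are quoted)
//   double.NaN                     -> "NaN"    (non-finite doubles are quoted)
//   ByteString.CopyFromUtf8("hi")  -> "aGk="   (base64)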
/// <summary>
/// Central interception point for well-known type formatting. Any well-known types which
/// don't need special handling can fall back to WriteMessage. We avoid assuming that the
/// values are using the embedded well-known types, in order to allow for dynamic messages
/// in the future.
/// </summary>
private void WriteWellKnownTypeValue(TextWriter writer, MessageDescriptor descriptor,
object value, int indentationLevel) {
// Currently, we can never actually get here, because null values are always handled by the
// caller. But if we *could*, this would do the right thing.
if (value == null) {
WriteNull(writer);
return;
}
// For wrapper types, the value will either be the (possibly boxed) "native" value,
// or the message itself if we're formatting it at the top level (e.g. just calling ToString
// on the object itself). If it's the message form, we can extract the value first, which
// *will* be the (possibly boxed) native value, and then proceed, writing it as if we were
// definitely in a field. (We never need to wrap it in an extra string... WriteValue will do
// the right thing.)
if (descriptor.IsWrapperType) {
if (value is IMessage message) {
value = message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber]
.Accessor.GetValue(message);
}
WriteValue(writer, value);
return;
}
if (descriptor.FullName == Timestamp.Descriptor.FullName) {
WriteTimestamp(writer, (IMessage)value);
return;
}
if (descriptor.FullName == Duration.Descriptor.FullName) {
WriteDuration(writer, (IMessage)value);
return;
}
if (descriptor.FullName == FieldMask.Descriptor.FullName) {
WriteFieldMask(writer, (IMessage)value);
return;
}
if (descriptor.FullName == Struct.Descriptor.FullName) {
WriteStruct(writer, (IMessage)value, indentationLevel);
return;
}
if (descriptor.FullName == ListValue.Descriptor.FullName) {
var fieldAccessor = descriptor.Fields[ListValue.ValuesFieldNumber].Accessor;
WriteList(writer, (IList)fieldAccessor.GetValue((IMessage)value), indentationLevel);
return;
}
if (descriptor.FullName == Value.Descriptor.FullName) {
WriteStructFieldValue(writer, (IMessage)value, indentationLevel);
return;
}
if (descriptor.FullName == Any.Descriptor.FullName) {
WriteAny(writer, (IMessage)value, indentationLevel);
return;
}
WriteMessage(writer, (IMessage)value, indentationLevel);
}
private void WriteTimestamp(TextWriter writer, IMessage value) {
// TODO: In the common case where this *is* using the built-in Timestamp type, we could
// avoid all the reflection at this point, by casting to Timestamp. In the interests of
// avoiding subtle bugs, don't do that until we've implemented DynamicMessage so that we can
// prove it still works in that case.
int nanos = (int)value.Descriptor.Fields[Timestamp.NanosFieldNumber].Accessor.GetValue(value);
long seconds =
(long)value.Descriptor.Fields[Timestamp.SecondsFieldNumber].Accessor.GetValue(value);
writer.Write(Timestamp.ToJson(seconds, nanos, DiagnosticOnly));
}
private void WriteDuration(TextWriter writer, IMessage value) {
// TODO: Same as for WriteTimestamp
int nanos = (int)value.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.GetValue(value);
long seconds =
(long)value.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.GetValue(value);
writer.Write(Duration.ToJson(seconds, nanos, DiagnosticOnly));
}
private void WriteFieldMask(TextWriter writer, IMessage value) {
var paths =
(IList<string>)value.Descriptor.Fields[FieldMask.PathsFieldNumber].Accessor.GetValue(
value);
writer.Write(FieldMask.ToJson(paths, DiagnosticOnly));
}
private void WriteAny(TextWriter writer, IMessage value, int indentationLevel) {
if (DiagnosticOnly) {
WriteDiagnosticOnlyAny(writer, value);
return;
}
string typeUrl =
(string)value.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.GetValue(value);
ByteString data =
(ByteString)value.Descriptor.Fields[Any.ValueFieldNumber].Accessor.GetValue(value);
string typeName = Any.GetTypeName(typeUrl);
MessageDescriptor descriptor = settings.TypeRegistry.Find(typeName);
if (descriptor == null) {
throw new InvalidOperationException(
$"Type registry has no descriptor for type name '{typeName}'");
}
IMessage message = descriptor.Parser.ParseFrom(data);
WriteBracketOpen(writer, ObjectOpenBracket);
MaybeWriteValueWhitespace(writer, indentationLevel + 1);
WriteString(writer, AnyTypeUrlField);
writer.Write(NameValueSeparator);
WriteString(writer, typeUrl);
if (descriptor.IsWellKnownType) {
writer.Write(ValueSeparator);
WriteString(writer, AnyWellKnownTypeValueField);
writer.Write(NameValueSeparator);
WriteWellKnownTypeValue(writer, descriptor, message, indentationLevel + 1);
} else {
WriteMessageFields(writer, message, true, indentationLevel + 1);
}
WriteBracketClose(writer, ObjectCloseBracket, true, indentationLevel);
}
private void WriteDiagnosticOnlyAny(TextWriter writer, IMessage value) {
string typeUrl =
(string)value.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.GetValue(value);
ByteString data =
(ByteString)value.Descriptor.Fields[Any.ValueFieldNumber].Accessor.GetValue(value);
writer.Write("{ ");
WriteString(writer, AnyTypeUrlField);
writer.Write(NameValueSeparator);
WriteString(writer, typeUrl);
writer.Write(ValueSeparator);
WriteString(writer, AnyDiagnosticValueField);
writer.Write(NameValueSeparator);
writer.Write('"');
writer.Write(data.ToBase64());
writer.Write('"');
writer.Write(" }");
}
private void WriteStruct(TextWriter writer, IMessage message, int indentationLevel) {
WriteBracketOpen(writer, ObjectOpenBracket);
IDictionary fields =
(IDictionary)message.Descriptor.Fields[Struct.FieldsFieldNumber].Accessor.GetValue(
message);
bool first = true;
foreach (DictionaryEntry entry in fields) {
string key = (string)entry.Key;
IMessage value = (IMessage)entry.Value;
if (string.IsNullOrEmpty(key) || value == null) {
throw new InvalidOperationException(
"Struct fields cannot have an empty key or a null value.");
}
MaybeWriteValueSeparator(writer, first);
MaybeWriteValueWhitespace(writer, indentationLevel + 1);
WriteString(writer, key);
writer.Write(NameValueSeparator);
WriteStructFieldValue(writer, value, indentationLevel + 1);
first = false;
}
WriteBracketClose(writer, ObjectCloseBracket, !first, indentationLevel);
}
private void WriteStructFieldValue(TextWriter writer, IMessage message, int indentationLevel) {
var specifiedField = message.Descriptor.Oneofs[0].Accessor.GetCaseFieldDescriptor(message);
if (specifiedField == null) {
throw new InvalidOperationException("Value message must contain a value for the oneof.");
}
object value = specifiedField.Accessor.GetValue(message);
switch (specifiedField.FieldNumber) {
case Value.BoolValueFieldNumber:
case Value.StringValueFieldNumber:
case Value.NumberValueFieldNumber:
WriteValue(writer, value);
return;
case Value.StructValueFieldNumber:
case Value.ListValueFieldNumber:
// Structs and ListValues are nested messages, and already well-known types.
var nestedMessage = (IMessage)specifiedField.Accessor.GetValue(message);
WriteWellKnownTypeValue(writer, nestedMessage.Descriptor, nestedMessage,
indentationLevel);
return;
case Value.NullValueFieldNumber:
WriteNull(writer);
return;
default:
throw new InvalidOperationException("Unexpected case in struct field: " +
specifiedField.FieldNumber);
}
}
internal void WriteList(TextWriter writer, IList list, int indentationLevel = 0) {
WriteBracketOpen(writer, ListBracketOpen);
bool first = true;
foreach (var value in list) {
MaybeWriteValueSeparator(writer, first);
MaybeWriteValueWhitespace(writer, indentationLevel + 1);
WriteValue(writer, value, indentationLevel + 1);
first = false;
}
WriteBracketClose(writer, ListBracketClose, !first, indentationLevel);
}
internal void WriteDictionary(TextWriter writer, IDictionary dictionary,
int indentationLevel = 0) {
WriteBracketOpen(writer, ObjectOpenBracket);
bool first = true;
// This will box each pair. Could use IDictionaryEnumerator, but that's ugly in terms of
// disposal.
foreach (DictionaryEntry pair in dictionary) {
string keyText;
if (pair.Key is string s) {
keyText = s;
} else if (pair.Key is bool b) {
keyText = b ? "true" : "false";
} else if (pair.Key is int || pair.Key is uint || pair.Key is long || pair.Key is ulong) {
keyText = ((IFormattable)pair.Key).ToString("d", CultureInfo.InvariantCulture);
} else {
if (pair.Key == null) {
throw new ArgumentException("Dictionary has entry with null key");
}
throw new ArgumentException("Unhandled dictionary key type: " + pair.Key.GetType());
}
MaybeWriteValueSeparator(writer, first);
MaybeWriteValueWhitespace(writer, indentationLevel + 1);
WriteString(writer, keyText);
writer.Write(NameValueSeparator);
WriteValue(writer, pair.Value, indentationLevel + 1);
first = false;
}
WriteBracketClose(writer, ObjectCloseBracket, !first, indentationLevel);
}
/// <summary>
/// Writes a string (including leading and trailing double quotes) to a builder, escaping as
/// required.
/// </summary>
/// <remarks>
/// Other than surrogate pair handling, this code is mostly taken from
/// src/google/protobuf/util/internal/json_escaping.cc.
/// </remarks>
internal static void WriteString(TextWriter writer, string text) {
writer.Write('"');
for (int i = 0; i < text.Length; i++) {
char c = text[i];
if (c < 0xa0) {
writer.Write(CommonRepresentations[c]);
continue;
}
if (char.IsHighSurrogate(c)) {
// Encountered first part of a surrogate pair.
// Check that we have the whole pair, and encode both parts as hex.
i++;
if (i == text.Length || !char.IsLowSurrogate(text[i])) {
throw new ArgumentException(
"String contains high surrogate not followed by low surrogate");
}
HexEncodeUtf16CodeUnit(writer, c);
HexEncodeUtf16CodeUnit(writer, text[i]);
continue;
} else if (char.IsLowSurrogate(c)) {
throw new ArgumentException(
"String contains low surrogate not preceded by high surrogate");
}
switch ((uint)c) {
// These are not required by json spec
// but used to prevent security bugs in javascript.
case 0xfeff: // Zero width no-break space
case 0xfff9: // Interlinear annotation anchor
case 0xfffa: // Interlinear annotation separator
case 0xfffb: // Interlinear annotation terminator
case 0x00ad: // Soft-hyphen
case 0x06dd: // Arabic end of ayah
case 0x070f: // Syriac abbreviation mark
case 0x17b4: // Khmer vowel inherent Aq
case 0x17b5: // Khmer vowel inherent Aa
HexEncodeUtf16CodeUnit(writer, c);
break;
default:
if ((c >= 0x0600 && c <= 0x0603) || // Arabic signs
(c >= 0x200b && c <= 0x200f) || // Zero width etc.
(c >= 0x2028 && c <= 0x202e) || // Separators etc.
(c >= 0x2060 && c <= 0x2064) || // Invisible etc.
(c >= 0x206a && c <= 0x206f)) {
HexEncodeUtf16CodeUnit(writer, c);
} else {
// No handling of surrogates here - that's done earlier
writer.Write(c);
}
break;
}
}
writer.Write('"');
}
private const string Hex = "0123456789abcdef";
private static void HexEncodeUtf16CodeUnit(TextWriter writer, char c) {
writer.Write("\\u");
writer.Write(Hex[(c >> 12) & 0xf]);
writer.Write(Hex[(c >> 8) & 0xf]);
writer.Write(Hex[(c >> 4) & 0xf]);
writer.Write(Hex[(c >> 0) & 0xf]);
}
private void WriteBracketOpen(TextWriter writer, char openChar) {
writer.Write(openChar);
if (settings.Indentation == null) {
writer.Write(' ');
}
}
private void WriteBracketClose(TextWriter writer, char closeChar, bool hasFields,
int indentationLevel) {
if (hasFields) {
if (settings.Indentation != null) {
writer.WriteLine();
WriteIndentation(writer, indentationLevel);
} else {
writer.Write(" ");
}
}
writer.Write(closeChar);
}
private void MaybeWriteValueWhitespace(TextWriter writer, int indentationLevel) {
if (settings.Indentation != null) {
writer.WriteLine();
WriteIndentation(writer, indentationLevel);
}
}
private void WriteIndentation(TextWriter writer, int indentationLevel) {
for (int i = 0; i < indentationLevel; i++) {
writer.Write(settings.Indentation);
}
}
/// <summary>
/// Settings controlling JSON formatting.
/// </summary>
public sealed class Settings {
/// <summary>
/// Default settings, as used by <see cref="JsonFormatter.Default"/>
/// </summary>
public static Settings Default { get; }
// Workaround for the Mono compiler complaining about XML comments not being on
// valid language elements.
static Settings() {
Default = new Settings(false);
}
/// <summary>
/// Whether fields which would otherwise not be included in the formatted data
/// should be formatted even when the value is not present, or has the default value.
/// This option only affects fields which don't support "presence" (e.g.
/// singular non-optional proto3 primitive fields).
/// </summary>
public bool FormatDefaultValues { get; }
/// <summary>
/// The type registry used to format <see cref="Any"/> messages.
/// </summary>
public TypeRegistry TypeRegistry { get; }
/// <summary>
/// Whether to format enums as ints. Defaults to false.
/// </summary>
public bool FormatEnumsAsIntegers { get; }
/// <summary>
/// Whether to use the original proto field names as defined in the .proto file. Defaults to
/// false.
/// </summary>
public bool PreserveProtoFieldNames { get; }
/// <summary>
/// Indentation string, used for formatting. Setting null disables indentation.
/// </summary>
public string Indentation { get; }
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified formatting of default
/// values and an empty type registry.
/// </summary>
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc)
/// should be formatted; <c>false</c> otherwise.</param>
public Settings(bool formatDefaultValues) : this(formatDefaultValues, TypeRegistry.Empty) {}
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified formatting of default
/// values and type registry.
/// </summary>
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc)
/// should be formatted; <c>false</c> otherwise.</param> <param name="typeRegistry">The <see
/// cref="TypeRegistry"/> to use when formatting <see cref="Any"/> messages.</param>
public Settings(bool formatDefaultValues, TypeRegistry typeRegistry)
: this(formatDefaultValues, typeRegistry, false, false) {}
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified parameters.
/// </summary>
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc)
/// should be formatted; <c>false</c> otherwise.</param> <param name="typeRegistry">The <see
/// cref="TypeRegistry"/> to use when formatting <see cref="Any"/> messages.
/// TypeRegistry.Empty will be used if it is null.</param> <param
/// name="formatEnumsAsIntegers"><c>true</c> to format the enums as integers; <c>false</c> to
/// format enums as enum names.</param> <param name="preserveProtoFieldNames"><c>true</c> to
/// preserve proto field names; <c>false</c> to convert them to lowerCamelCase.</param> <param
/// name="indentation">The indentation string to use for multi-line formatting. <c>null</c> to
/// disable multi-line format.</param>
private Settings(bool formatDefaultValues, TypeRegistry typeRegistry,
bool formatEnumsAsIntegers, bool preserveProtoFieldNames,
string indentation = null) {
FormatDefaultValues = formatDefaultValues;
TypeRegistry = typeRegistry ?? TypeRegistry.Empty;
FormatEnumsAsIntegers = formatEnumsAsIntegers;
PreserveProtoFieldNames = preserveProtoFieldNames;
Indentation = indentation;
}
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified formatting of default
/// values and the current settings.
/// </summary>
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc)
/// should be formatted; <c>false</c> otherwise.</param>
public Settings WithFormatDefaultValues(bool formatDefaultValues) =>
new Settings(formatDefaultValues, TypeRegistry, FormatEnumsAsIntegers,
PreserveProtoFieldNames, Indentation);
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified type registry and the
/// current settings.
/// </summary>
/// <param name="typeRegistry">The <see cref="TypeRegistry"/> to use when formatting <see
/// cref="Any"/> messages.</param>
public Settings WithTypeRegistry(TypeRegistry typeRegistry) =>
new Settings(FormatDefaultValues, typeRegistry, FormatEnumsAsIntegers,
PreserveProtoFieldNames, Indentation);
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified enums formatting option and
/// the current settings.
/// </summary>
/// <param name="formatEnumsAsIntegers"><c>true</c> to format the enums as integers;
/// <c>false</c> to format enums as enum names.</param>
public Settings WithFormatEnumsAsIntegers(bool formatEnumsAsIntegers) =>
new Settings(FormatDefaultValues, TypeRegistry, formatEnumsAsIntegers,
PreserveProtoFieldNames, Indentation);
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified field name formatting
/// option and the current settings.
/// </summary>
/// <param name="preserveProtoFieldNames"><c>true</c> to preserve proto field names;
/// <c>false</c> to convert them to lowerCamelCase.</param>
public Settings WithPreserveProtoFieldNames(bool preserveProtoFieldNames) =>
new Settings(FormatDefaultValues, TypeRegistry, FormatEnumsAsIntegers,
preserveProtoFieldNames, Indentation);
/// <summary>
/// Creates a new <see cref="Settings"/> object with the specified indentation and the current
/// settings.
/// </summary>
/// <param name="indentation">The string to output for each level of indentation (nesting).
/// The default is two spaces per level. Use null to disable indentation entirely.</param>
/// <remarks>A non-null value for <see cref="Indentation"/> will insert additional line-breaks
/// to the JSON output. Each line will contain either a single value, or braces. The default
/// line-break is determined by <see cref="Environment.NewLine"/>, which is <c>"\n"</c> on
/// Unix platforms, and <c>"\r\n"</c> on Windows. If <see cref="JsonFormatter"/> seems to
/// produce empty lines, you need to pass a <see cref="TextWriter"/> that uses a <c>"\n"</c>
/// newline. See <see cref="JsonFormatter.Format(Google.Protobuf.IMessage, TextWriter)"/>.
/// </remarks>
public Settings WithIndentation(string indentation = " ") =>
new Settings(FormatDefaultValues, TypeRegistry, FormatEnumsAsIntegers,
PreserveProtoFieldNames, indentation);
}
// Effectively a cache of mapping from enum values to the original name as specified in the
// proto file, fetched by reflection. The need for this is unfortunate, as is its unbounded
// size, but realistically it shouldn't cause issues.
private static class OriginalEnumValueHelper {
private static readonly ConcurrentDictionary<System.Type, Dictionary<object, string>>
dictionaries = new ConcurrentDictionary<System.Type, Dictionary<object, string>>();
[UnconditionalSuppressMessage(
"Trimming", "IL2072",
Justification =
"The field for the value must still be present. It will be returned by reflection, will be in this collection, and its name can be resolved.")]
[UnconditionalSuppressMessage(
"Trimming", "IL2067",
Justification =
"The field for the value must still be present. It will be returned by reflection, will be in this collection, and its name can be resolved.")]
internal static string GetOriginalName(object value) {
// Warnings are suppressed on this method. However, this code has been tested in an AOT app
// and verified that it works. Issue
// https://github.com/protocolbuffers/protobuf/issues/14788 discusses changes to guarantee
// that enum fields are never trimmed.
Dictionary<object, string> nameMapping =
dictionaries.GetOrAdd(value.GetType(), static t => GetNameMapping(t));
// If this returns false, originalName will be null, which is what we want.
nameMapping.TryGetValue(value, out string originalName);
return originalName;
}
private static Dictionary<object, string> GetNameMapping([
DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields |
DynamicallyAccessedMemberTypes.NonPublicFields)
] System.Type enumType) {
return enumType.GetTypeInfo()
.DeclaredFields.Where(f => f.IsStatic)
.Where(f => f.GetCustomAttributes<OriginalNameAttribute>()
.FirstOrDefault()
?.PreferredAlias ??
true)
.ToDictionary(
f => f.GetValue(null),
f =>
f.GetCustomAttributes<OriginalNameAttribute>()
.FirstOrDefault()
// If the attribute hasn't been applied, fall back to the name of the field.
?.Name ??
f.Name);
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 63dd1577ee490464d9f5a33870c5d966
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fb7f5bd03bb6e854a9984a9ad6227ae6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,118 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
internal sealed class JsonToken : IEquatable<JsonToken>
{
internal static JsonToken Null { get; } = new JsonToken(TokenType.Null);
internal static JsonToken False { get; } = new JsonToken(TokenType.False);
internal static JsonToken True { get; } = new JsonToken(TokenType.True);
internal static JsonToken StartObject { get; } = new JsonToken(TokenType.StartObject);
internal static JsonToken EndObject { get; } = new JsonToken(TokenType.EndObject);
internal static JsonToken StartArray { get; } = new JsonToken(TokenType.StartArray);
internal static JsonToken EndArray { get; } = new JsonToken(TokenType.EndArray);
internal static JsonToken EndDocument { get; } = new JsonToken(TokenType.EndDocument);
internal static JsonToken Name(string name)
{
return new JsonToken(TokenType.Name, stringValue: name);
}
internal static JsonToken Value(string value)
{
return new JsonToken(TokenType.StringValue, stringValue: value);
}
internal static JsonToken Value(double value)
{
return new JsonToken(TokenType.Number, numberValue: value);
}
internal enum TokenType
{
Null,
False,
True,
StringValue,
Number,
Name,
StartObject,
EndObject,
StartArray,
EndArray,
EndDocument
}
// A value is a string, number, array, object, null, true or false
// Arrays and objects have start/end
// A document consists of a value
// Objects are name/value sequences.
private readonly TokenType type;
private readonly string stringValue;
private readonly double numberValue;
internal TokenType Type => type;
internal string StringValue => stringValue;
internal double NumberValue => numberValue;
private JsonToken(TokenType type, string stringValue = null, double numberValue = 0)
{
this.type = type;
this.stringValue = stringValue;
this.numberValue = numberValue;
}
public override bool Equals(object obj) => Equals(obj as JsonToken);
public override int GetHashCode()
{
unchecked
{
int hash = 17;
hash = hash * 31 + (int) type;
hash = hash * 31 + (stringValue == null ? 0 : stringValue.GetHashCode());
hash = hash * 31 + numberValue.GetHashCode();
return hash;
}
}
public override string ToString()
{
return type switch
{
TokenType.Null => "null",
TokenType.True => "true",
TokenType.False => "false",
TokenType.Name => $"name ({stringValue})",
TokenType.StringValue => $"value ({stringValue})",
TokenType.Number => $"number ({numberValue})",
TokenType.StartObject => "start-object",
TokenType.EndObject => "end-object",
TokenType.StartArray => "start-array",
TokenType.EndArray => "end-array",
TokenType.EndDocument => "end-document",
_ => throw new InvalidOperationException($"Token is of unknown type {type}"),
};
}
public bool Equals(JsonToken other)
{
if (other is null)
{
return false;
}
// Note use of other.numberValue.Equals rather than ==, so that NaN compares appropriately.
return other.type == type && other.stringValue == stringValue && other.numberValue.Equals(numberValue);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b5566a936b9c2c1449e2a6e8fce23559
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,812 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
namespace Google.Protobuf
{
/// <summary>
/// Simple but strict JSON tokenizer, rigidly following RFC 7159.
/// </summary>
/// <remarks>
/// <para>
/// This tokenizer is stateful, and only returns "useful" tokens - names, values etc.
/// It does not create tokens for the separator between names and values, or for the comma
/// between values. It validates the token stream as it goes - so callers can assume that the
/// tokens it produces are appropriate. For example, it would never produce "start object, end array."
/// </para>
/// <para>Implementation details: the base class handles single token push-back and object depth tracking.</para>
/// <para>Not thread-safe.</para>
/// </remarks>
internal abstract class JsonTokenizer
{
private JsonToken bufferedToken;
/// <summary>
/// Creates a tokenizer that reads from the given text reader.
/// </summary>
internal static JsonTokenizer FromTextReader(TextReader reader)
{
return new JsonTextTokenizer(reader);
}
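// Editor's illustration (not in the original source): typical use of the tokenizer from within
// this assembly. The JSON literal below is an assumed example input.
//
//   var tokenizer = JsonTokenizer.FromTextReader(new StringReader("{ \"name\": \"x\" }"));
//   JsonToken token;
//   while ((token = tokenizer.Next()).Type != JsonToken.TokenType.EndDocument)
//   {
//       // Yields StartObject, Name("name"), StringValue("x"), EndObject for the input above.
//   }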
/// <summary>
/// Creates a tokenizer that first replays the given list of tokens, then continues reading
/// from another tokenizer. Note that if the returned tokenizer is "pushed back", that does not push back
/// on the continuation tokenizer, or vice versa. Care should be taken when using this method - it was
/// created for the sake of Any parsing.
/// </summary>
internal static JsonTokenizer FromReplayedTokens(IList<JsonToken> tokens, JsonTokenizer continuation)
{
return new JsonReplayTokenizer(tokens, continuation);
}
/// <summary>
/// Returns the depth of the stack, purely in objects (not collections).
/// Informally, this is the number of remaining unclosed '{' characters we have.
/// </summary>
internal int ObjectDepth { get; private set; }
// TODO: Why do we allow a different token to be pushed back? It might be better to always remember the previous
// token returned, and allow a parameterless Rewind() method (which could only be called once, just like the current PushBack).
internal void PushBack(JsonToken token)
{
if (bufferedToken != null)
{
throw new InvalidOperationException("Can't push back twice");
}
bufferedToken = token;
if (token.Type == JsonToken.TokenType.StartObject)
{
ObjectDepth--;
}
else if (token.Type == JsonToken.TokenType.EndObject)
{
ObjectDepth++;
}
}
/// <summary>
/// Returns the next JSON token in the stream. An EndDocument token is returned to indicate the end of the stream,
/// after which point <c>Next()</c> should not be called again.
/// </summary>
/// <remarks>This implementation provides single-token buffering, and calls <see cref="NextImpl"/> if there is no buffered token.</remarks>
/// <returns>The next token in the stream. This is never null.</returns>
/// <exception cref="InvalidOperationException">This method is called after an EndDocument token has been returned</exception>
/// <exception cref="InvalidJsonException">The input text does not comply with RFC 7159</exception>
internal JsonToken Next()
{
JsonToken tokenToReturn;
if (bufferedToken != null)
{
tokenToReturn = bufferedToken;
bufferedToken = null;
}
else
{
tokenToReturn = NextImpl();
}
if (tokenToReturn.Type == JsonToken.TokenType.StartObject)
{
ObjectDepth++;
}
else if (tokenToReturn.Type == JsonToken.TokenType.EndObject)
{
ObjectDepth--;
}
return tokenToReturn;
}
/// <summary>
/// Returns the next JSON token in the stream, when requested by the base class. (The <see cref="Next"/> method delegates
/// to this if it doesn't have a buffered token.)
/// </summary>
/// <exception cref="InvalidOperationException">This method is called after an EndDocument token has been returned</exception>
/// <exception cref="InvalidJsonException">The input text does not comply with RFC 7159</exception>
protected abstract JsonToken NextImpl();
/// <summary>
/// Skips the value we're about to read. This must only be called immediately after reading a property name.
/// If the value is an object or an array, the complete object/array is skipped.
/// </summary>
internal void SkipValue()
{
// We'll assume that Next() makes sure that the end objects and end arrays are all valid.
// All we care about is the total nesting depth we need to close.
int depth = 0;
// do/while rather than while loop so that we read at least one token.
do
{
var token = Next();
switch (token.Type)
{
case JsonToken.TokenType.EndArray:
case JsonToken.TokenType.EndObject:
depth--;
break;
case JsonToken.TokenType.StartArray:
case JsonToken.TokenType.StartObject:
depth++;
break;
}
} while (depth != 0);
}
/// <summary>
/// Tokenizer which first exhausts a list of tokens, then consults another tokenizer.
/// </summary>
private class JsonReplayTokenizer : JsonTokenizer
{
private readonly IList<JsonToken> tokens;
private readonly JsonTokenizer nextTokenizer;
private int nextTokenIndex;
internal JsonReplayTokenizer(IList<JsonToken> tokens, JsonTokenizer nextTokenizer)
{
this.tokens = tokens;
this.nextTokenizer = nextTokenizer;
}
// FIXME: Object depth not maintained...
protected override JsonToken NextImpl()
{
if (nextTokenIndex >= tokens.Count)
{
return nextTokenizer.Next();
}
return tokens[nextTokenIndex++];
}
}
/// <summary>
/// Tokenizer which does all the *real* work of parsing JSON.
/// </summary>
private sealed class JsonTextTokenizer : JsonTokenizer
{
// The set of states in which a value is valid next token.
private static readonly State ValueStates = State.ArrayStart | State.ArrayAfterComma | State.ObjectAfterColon | State.StartOfDocument;
private readonly Stack<ContainerType> containerStack = new Stack<ContainerType>();
private readonly PushBackReader reader;
private State state;
internal JsonTextTokenizer(TextReader reader)
{
this.reader = new PushBackReader(reader);
state = State.StartOfDocument;
containerStack.Push(ContainerType.Document);
}
/// <remarks>
/// This method essentially just loops through characters skipping whitespace, validating and
/// changing state (e.g. from ObjectBeforeColon to ObjectAfterColon)
/// until it reaches something which will be a genuine token (e.g. a start object, or a value) at which point
/// it returns the token. Although the method is large, it would be relatively hard to break down further... most
/// of it is the large switch statement, which sometimes returns and sometimes doesn't.
/// </remarks>
protected override JsonToken NextImpl()
{
if (state == State.ReaderExhausted)
{
throw new InvalidOperationException("Next() called after end of document");
}
while (true)
{
var next = reader.Read();
switch (next)
{
case -1:
ValidateState(State.ExpectedEndOfDocument, "Unexpected end of document in state: ");
state = State.ReaderExhausted;
return JsonToken.EndDocument;
// Skip whitespace between tokens
case ' ':
case '\t':
case '\r':
case '\n':
break;
case ':':
ValidateState(State.ObjectBeforeColon, "Invalid state to read a colon: ");
state = State.ObjectAfterColon;
break;
case ',':
ValidateState(State.ObjectAfterProperty | State.ArrayAfterValue, "Invalid state to read a comma: ");
state = state == State.ObjectAfterProperty ? State.ObjectAfterComma : State.ArrayAfterComma;
break;
case '"':
string stringValue = ReadString();
if ((state & (State.ObjectStart | State.ObjectAfterComma)) != 0)
{
state = State.ObjectBeforeColon;
return JsonToken.Name(stringValue);
}
else
{
ValidateAndModifyStateForValue("Invalid state to read a double quote: ");
return JsonToken.Value(stringValue);
}
case '{':
ValidateState(ValueStates, "Invalid state to read an open brace: ");
state = State.ObjectStart;
containerStack.Push(ContainerType.Object);
return JsonToken.StartObject;
case '}':
ValidateState(State.ObjectAfterProperty | State.ObjectStart, "Invalid state to read a close brace: ");
PopContainer();
return JsonToken.EndObject;
case '[':
ValidateState(ValueStates, "Invalid state to read an open square bracket: ");
state = State.ArrayStart;
containerStack.Push(ContainerType.Array);
return JsonToken.StartArray;
case ']':
ValidateState(State.ArrayAfterValue | State.ArrayStart, "Invalid state to read a close square bracket: ");
PopContainer();
return JsonToken.EndArray;
case 'n': // Start of null
ConsumeLiteral("null");
ValidateAndModifyStateForValue("Invalid state to read a null literal: ");
return JsonToken.Null;
case 't': // Start of true
ConsumeLiteral("true");
ValidateAndModifyStateForValue("Invalid state to read a true literal: ");
return JsonToken.True;
case 'f': // Start of false
ConsumeLiteral("false");
ValidateAndModifyStateForValue("Invalid state to read a false literal: ");
return JsonToken.False;
case '-': // Start of a number
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
double number = ReadNumber((char) next);
ValidateAndModifyStateForValue("Invalid state to read a number token: ");
return JsonToken.Value(number);
default:
throw new InvalidJsonException($"Invalid first character of token: {(char) next}");
}
}
}
private void ValidateState(State validStates, string errorPrefix)
{
if ((validStates & state) == 0)
{
throw reader.CreateException(errorPrefix + state);
}
}
/// <summary>
/// Reads a string token. It is assumed that the opening " has already been read.
/// </summary>
private string ReadString()
{
//builder will not be released in case of an exception, but this is not a problem and we will create new on next Acquire
var builder = StringBuilderCache.Acquire();
bool haveHighSurrogate = false;
while (true)
{
char c = reader.ReadOrFail("Unexpected end of text while reading string");
if (c < ' ')
{
throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in string literal: U+{0:x4}", (int) c));
}
if (c == '"')
{
if (haveHighSurrogate)
{
throw reader.CreateException("Invalid use of surrogate pair code units");
}
return StringBuilderCache.GetStringAndRelease(builder);
}
if (c == '\\')
{
c = ReadEscapedCharacter();
}
// TODO: Consider only allowing surrogate pairs that are either both escaped,
// or both not escaped. It would be a very odd text stream that contained a "lone" high surrogate
// followed by an escaped low surrogate or vice versa... and that couldn't even be represented in UTF-8.
if (haveHighSurrogate != char.IsLowSurrogate(c))
{
throw reader.CreateException("Invalid use of surrogate pair code units");
}
haveHighSurrogate = char.IsHighSurrogate(c);
builder.Append(c);
}
}
/// <summary>
/// Reads an escaped character. It is assumed that the leading backslash has already been read.
/// </summary>
private char ReadEscapedCharacter()
{
char c = reader.ReadOrFail("Unexpected end of text while reading character escape sequence");
return c switch
{
'n' => '\n',
'\\' => '\\',
'b' => '\b',
'f' => '\f',
'r' => '\r',
't' => '\t',
'"' => '"',
'/' => '/',
'u' => ReadUnicodeEscape(),
_ => throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in character escape sequence: U+{0:x4}", (int)c)),
};
}
/// <summary>
/// Reads an escaped Unicode 4-nybble hex sequence. It is assumed that the leading \u has already been read.
/// </summary>
private char ReadUnicodeEscape()
{
int result = 0;
for (int i = 0; i < 4; i++)
{
char c = reader.ReadOrFail("Unexpected end of text while reading Unicode escape sequence");
int nybble;
if (c >= '0' && c <= '9')
{
nybble = c - '0';
}
else if (c >= 'a' && c <= 'f')
{
nybble = c - 'a' + 10;
}
else if (c >= 'A' && c <= 'F')
{
nybble = c - 'A' + 10;
}
else
{
throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in character escape sequence: U+{0:x4}", (int) c));
}
result = (result << 4) + nybble;
}
return (char) result;
}
/// <summary>
/// Consumes a text-only literal, throwing an exception if the read text doesn't match it.
/// It is assumed that the first letter of the literal has already been read.
/// </summary>
private void ConsumeLiteral(string text)
{
for (int i = 1; i < text.Length; i++)
{
int next = reader.Read();
if (next != text[i])
{
// Only check for "end of text" when we've detected that the character differs from the
// expected one.
var message = next == -1
? $"Unexpected end of text while reading literal token {text}"
: $"Unexpected character while reading literal token {text}";
throw reader.CreateException(message);
}
}
}
private double ReadNumber(char initialCharacter)
{
// The builder is not released if an exception is thrown, but that is not a problem: a new one is created on the next Acquire.
var builder = StringBuilderCache.Acquire();
if (initialCharacter == '-')
{
builder.Append("-");
}
else
{
reader.PushBack(initialCharacter);
}
// Each method returns the character it read that doesn't belong in that part,
// so we know what to do next, including pushing the character back at the end.
// -1 is returned for "end of text".
int next = ReadInt(builder);
if (next == '.')
{
next = ReadFrac(builder);
}
if (next == 'e' || next == 'E')
{
next = ReadExp(builder);
}
// If we read a character which wasn't part of the number, push it back so we can read it again
// to parse the next token.
if (next != -1)
{
reader.PushBack((char) next);
}
// TODO: What exception should we throw if the value can't be represented as a double?
var builderValue = StringBuilderCache.GetStringAndRelease(builder);
try
{
double result = double.Parse(builderValue,
NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent,
CultureInfo.InvariantCulture);
// .NET Core 3.0 and later returns infinity if the number is too large or small to be represented.
// For compatibility with other Protobuf implementations the tokenizer should still throw.
if (double.IsInfinity(result))
{
throw reader.CreateException("Numeric value out of range: " + builderValue);
}
return result;
}
catch (OverflowException)
{
throw reader.CreateException("Numeric value out of range: " + builderValue);
}
}
/// <summary>
/// Copies an integer into a StringBuilder.
/// </summary>
/// <param name="builder">The builder to read the number into</param>
/// <returns>The character following the integer, or -1 for end-of-text.</returns>
private int ReadInt(StringBuilder builder)
{
char first = reader.ReadOrFail("Invalid numeric literal");
if (first < '0' || first > '9')
{
throw reader.CreateException("Invalid numeric literal");
}
builder.Append(first);
int next = ConsumeDigits(builder, out int digitCount);
if (first == '0' && digitCount != 0)
{
throw reader.CreateException("Invalid numeric literal: leading 0 for non-zero value.");
}
return next;
}
/// <summary>
/// Copies the fractional part of an integer into a StringBuilder, assuming reader is positioned after a period.
/// </summary>
/// <param name="builder">The builder to read the number into</param>
/// <returns>The character following the fractional part, or -1 for end-of-text.</returns>
private int ReadFrac(StringBuilder builder)
{
builder.Append('.'); // Already consumed this
int next = ConsumeDigits(builder, out int digitCount);
if (digitCount == 0)
{
throw reader.CreateException("Invalid numeric literal: fraction with no trailing digits");
}
return next;
}
/// <summary>
/// Copies the exponent part of a number into a StringBuilder, with an assumption that the reader is already positioned after the "e".
/// </summary>
/// <param name="builder">The builder to read the number into</param>
/// <returns>The character following the exponent, or -1 for end-of-text.</returns>
private int ReadExp(StringBuilder builder)
{
builder.Append('E'); // Already consumed this (or 'e')
int next = reader.Read();
if (next == -1)
{
throw reader.CreateException("Invalid numeric literal: exponent with no trailing digits");
}
if (next == '-' || next == '+')
{
builder.Append((char) next);
}
else
{
reader.PushBack((char) next);
}
next = ConsumeDigits(builder, out int digitCount);
if (digitCount == 0)
{
throw reader.CreateException("Invalid numeric literal: exponent without value");
}
return next;
}
/// <summary>
/// Copies a sequence of digits into a StringBuilder.
/// </summary>
/// <param name="builder">The builder to read the number into</param>
/// <param name="count">The number of digits appended to the builder</param>
/// <returns>The character following the digits, or -1 for end-of-text.</returns>
private int ConsumeDigits(StringBuilder builder, out int count)
{
count = 0;
while (true)
{
int next = reader.Read();
if (next == -1 || next < '0' || next > '9')
{
return next;
}
count++;
builder.Append((char) next);
}
}
/// <summary>
/// Validates that we're in a valid state to read a value (using the given error prefix if necessary)
/// and changes the state to the appropriate one, e.g. ObjectAfterColon to ObjectAfterProperty.
/// </summary>
private void ValidateAndModifyStateForValue(string errorPrefix)
{
ValidateState(ValueStates, errorPrefix);
switch (state)
{
case State.StartOfDocument:
state = State.ExpectedEndOfDocument;
return;
case State.ObjectAfterColon:
state = State.ObjectAfterProperty;
return;
case State.ArrayStart:
case State.ArrayAfterComma:
state = State.ArrayAfterValue;
return;
default:
throw new InvalidOperationException("ValidateAndModifyStateForValue does not handle all value states (and should)");
}
}
/// <summary>
/// Pops the top-most container, and sets the state to the appropriate one for the end of a value
/// in the parent container.
/// </summary>
private void PopContainer()
{
containerStack.Pop();
var parent = containerStack.Peek();
state = parent switch
{
ContainerType.Object => State.ObjectAfterProperty,
ContainerType.Array => State.ArrayAfterValue,
ContainerType.Document => State.ExpectedEndOfDocument,
_ => throw new InvalidOperationException("Unexpected container type: " + parent),
};
}
private enum ContainerType
{
Document, Object, Array
}
/// <summary>
/// Possible states of the tokenizer.
/// </summary>
/// <remarks>
/// <para>This is a flags enum purely so we can simply and efficiently represent a set of valid states
/// for checking.</para>
/// <para>
/// Each is documented with an example,
/// where ^ represents the current position within the text stream. The examples all use string values,
/// but could be any value, including nested objects/arrays.
/// The complete state of the tokenizer also includes a stack to indicate the contexts (arrays/objects).
/// Any additional notional state of "AfterValue" indicates that a value has been completed, at which
/// point there's an immediate transition to ExpectedEndOfDocument, ObjectAfterProperty or ArrayAfterValue.
/// </para>
/// <para>
/// These states were derived manually by reading RFC 7159 carefully.
/// </para>
/// </remarks>
[Flags]
private enum State
{
/// <summary>
/// ^ { "foo": "bar" }
/// Before the value in a document. Next states: ObjectStart, ArrayStart, "AfterValue"
/// </summary>
StartOfDocument = 1 << 0,
/// <summary>
/// { "foo": "bar" } ^
/// After the value in a document. Next states: ReaderExhausted
/// </summary>
ExpectedEndOfDocument = 1 << 1,
/// <summary>
/// { "foo": "bar" } ^ (and already read to the end of the reader)
/// Terminal state.
/// </summary>
ReaderExhausted = 1 << 2,
/// <summary>
/// { ^ "foo": "bar" }
/// Before the *first* property in an object.
/// Next states:
/// "AfterValue" (empty object)
/// ObjectBeforeColon (read a name)
/// </summary>
ObjectStart = 1 << 3,
/// <summary>
/// { "foo" ^ : "bar", "x": "y" }
/// Next state: ObjectAfterColon
/// </summary>
ObjectBeforeColon = 1 << 4,
/// <summary>
/// { "foo" : ^ "bar", "x": "y" }
/// Before any property other than the first in an object.
/// (Equivalently: after any property in an object)
/// Next states:
/// "AfterValue" (value is simple)
/// ObjectStart (value is object)
/// ArrayStart (value is array)
/// </summary>
ObjectAfterColon = 1 << 5,
/// <summary>
/// { "foo" : "bar" ^ , "x" : "y" }
/// At the end of a property, so expecting either a comma or end-of-object
/// Next states: ObjectAfterComma or "AfterValue"
/// </summary>
ObjectAfterProperty = 1 << 6,
/// <summary>
/// { "foo":"bar", ^ "x":"y" }
/// Read the comma after the previous property, so expecting another property.
/// This is like ObjectStart, but closing brace isn't valid here
/// Next state: ObjectBeforeColon.
/// </summary>
ObjectAfterComma = 1 << 7,
/// <summary>
/// [ ^ "foo", "bar" ]
/// Before the *first* value in an array.
/// Next states:
/// "AfterValue" (read a value)
/// "AfterValue" (end of array; will pop stack)
/// </summary>
ArrayStart = 1 << 8,
/// <summary>
/// [ "foo" ^ , "bar" ]
/// After any value in an array, so expecting either a comma or end-of-array
/// Next states: ArrayAfterComma or "AfterValue"
/// </summary>
ArrayAfterValue = 1 << 9,
/// <summary>
/// [ "foo", ^ "bar" ]
/// After a comma in an array, so there *must* be another value (simple or complex).
/// Next states: "AfterValue" (simple value), StartObject, StartArray
/// </summary>
ArrayAfterComma = 1 << 10
}
/// <summary>
/// Wrapper around a text reader allowing small amounts of buffering and location handling.
/// </summary>
private class PushBackReader
{
// TODO: Add locations for errors etc.
private readonly TextReader reader;
internal PushBackReader(TextReader reader)
{
// TODO: Wrap the reader in a BufferedReader?
this.reader = reader;
}
/// <summary>
/// The buffered next character, if we have one, or -1 if there is no buffered character.
/// </summary>
private int nextChar = -1;
/// <summary>
/// Returns the next character in the stream, or -1 if we have reached the end of the stream.
/// </summary>
internal int Read()
{
if (nextChar != -1)
{
int tmp = nextChar;
nextChar = -1;
return tmp;
}
return reader.Read();
}
/// <summary>
/// Reads the next character from the underlying reader, throwing an <see cref="InvalidJsonException" />
/// with the specified message if there are no more characters available.
/// </summary>
internal char ReadOrFail(string messageOnFailure)
{
int next = Read();
if (next == -1)
{
throw CreateException(messageOnFailure);
}
return (char) next;
}
internal void PushBack(char c)
{
if (nextChar != -1)
{
throw new InvalidOperationException("Cannot push back when already buffering a character");
}
nextChar = c;
}
/// <summary>
/// Creates a new exception appropriate for the current state of the reader.
/// </summary>
internal InvalidJsonException CreateException(string message)
{
// TODO: Keep track of and use the location.
return new InvalidJsonException(message);
}
}
/// <summary>
/// Provide a cached reusable instance of stringbuilder per thread.
/// Copied from https://github.com/dotnet/runtime/blob/main/src/libraries/Common/src/System/Text/StringBuilderCache.cs
/// </summary>
private static class StringBuilderCache
{
private const int MaxCachedStringBuilderSize = 360;
private const int DefaultStringBuilderCapacity = 16; // == StringBuilder.DefaultCapacity
[ThreadStatic]
private static StringBuilder cachedInstance;
/// <summary>Get a StringBuilder for the specified capacity.</summary>
/// <remarks>If a StringBuilder of an appropriate size is cached, it will be returned and the cache emptied.</remarks>
public static StringBuilder Acquire(int capacity = DefaultStringBuilderCapacity)
{
if (capacity <= MaxCachedStringBuilderSize)
{
StringBuilder sb = cachedInstance;
if (sb != null)
{
// Avoid stringbuilder block fragmentation by getting a new StringBuilder
// when the requested size is larger than the current capacity
if (capacity <= sb.Capacity)
{
cachedInstance = null;
sb.Clear();
return sb;
}
}
}
return new StringBuilder(capacity);
}
/// <summary>Place the specified builder in the cache if it is not too big.</summary>
private static void Release(StringBuilder sb)
{
if (sb.Capacity <= MaxCachedStringBuilderSize)
{
cachedInstance = cachedInstance?.Capacity >= sb.Capacity ? cachedInstance : sb;
}
}
/// <summary>ToString() the stringbuilder, Release it to the cache, and return the resulting string.</summary>
public static string GetStringAndRelease(StringBuilder sb)
{
string result = sb.ToString();
Release(sb);
return result;
}
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 30766ac4e6327c34fad6f3f76582eb26
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,64 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.IO;
namespace Google.Protobuf
{
/// <summary>
/// Stream implementation which proxies another stream, only allowing a certain amount
/// of data to be read. Note that this is only used to read delimited streams, so it
/// doesn't attempt to implement everything.
/// </summary>
internal sealed class LimitedInputStream : Stream
{
private readonly Stream proxied;
private int bytesLeft;
internal LimitedInputStream(Stream proxied, int size)
{
this.proxied = proxied;
bytesLeft = size;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush()
{
}
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override int Read(byte[] buffer, int offset, int count)
{
if (bytesLeft > 0)
{
int bytesRead = proxied.Read(buffer, offset, Math.Min(bytesLeft, count));
bytesLeft -= bytesRead;
return bytesRead;
}
return 0;
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ae351a949e399ac4bac5fa72c263c2b6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,329 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using Google.Protobuf.Reflection;
using System.Buffers;
using System.Collections;
using System;
using System.IO;
using System.Linq;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Extension methods on <see cref="IMessage"/> and <see cref="IMessage{T}"/>.
/// </summary>
public static class MessageExtensions
{
/// <summary>
/// Merges data from the given byte array into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="data">The data to merge, which must be protobuf-encoded binary data.</param>
public static void MergeFrom(this IMessage message, byte[] data) =>
MergeFrom(message, data, false, null);
/// <summary>
/// Merges data from the given byte array slice into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="data">The data containing the slice to merge, which must be protobuf-encoded binary data.</param>
/// <param name="offset">The offset of the slice to merge.</param>
/// <param name="length">The length of the slice to merge.</param>
public static void MergeFrom(this IMessage message, byte[] data, int offset, int length) =>
MergeFrom(message, data, offset, length, false, null);
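/// <summary>
/// Merges data from the given byte array slice into an existing message, reusing the supplied
/// <see cref="CodedInputStream"/> instead of allocating a new one. The supplied stream is reset
/// to read the given slice before merging.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="input">A reusable coded input stream; it is reset to read the slice.</param>
/// <param name="data">The data containing the slice to merge, which must be protobuf-encoded binary data.</param>
/// <param name="offset">The offset of the slice to merge.</param>
/// <param name="length">The length of the slice to merge.</param>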
public static void MergeFromEx(this IMessage message, CodedInputStream input, byte[] data, int offset, int length) =>
MergeFrom(message, input, data, offset, length, false, null);
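// Editor's sketch (not part of the original source): reusing one CodedInputStream across many
// merges to avoid per-call allocations. `buffer`, `length` and `msg` are hypothetical.
//
//   var sharedInput = new CodedInputStream(buffer);     // created once and kept around
//   msg.MergeFromEx(sharedInput, buffer, 0, length);    // resets and reuses sharedInput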
/// <summary>
/// Merges data from the given byte string into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="data">The data to merge, which must be protobuf-encoded binary data.</param>
public static void MergeFrom(this IMessage message, ByteString data) =>
MergeFrom(message, data, false, null);
/// <summary>
/// Merges data from the given stream into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="input">Stream containing the data to merge, which must be protobuf-encoded binary data.</param>
public static void MergeFrom(this IMessage message, Stream input) =>
MergeFrom(message, input, false, null);
/// <summary>
/// Merges data from the given span into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="span">Span containing the data to merge, which must be protobuf-encoded binary data.</param>
[SecuritySafeCritical]
public static void MergeFrom(this IMessage message, ReadOnlySpan<byte> span) =>
MergeFrom(message, span, false, null);
/// <summary>
/// Merges data from the given sequence into an existing message.
/// </summary>
/// <param name="message">The message to merge the data into.</param>
/// <param name="sequence">Sequence from the specified data to merge, which must be protobuf-encoded binary data.</param>
[SecuritySafeCritical]
public static void MergeFrom(this IMessage message, ReadOnlySequence<byte> sequence) =>
MergeFrom(message, sequence, false, null);
/// <summary>
/// Merges length-delimited data from the given stream into an existing message.
/// </summary>
/// <remarks>
/// The stream is expected to contain a length and then the data. Only the amount of data
/// specified by the length will be consumed.
/// </remarks>
/// <param name="message">The message to merge the data into.</param>
/// <param name="input">Stream containing the data to merge, which must be protobuf-encoded binary data.</param>
public static void MergeDelimitedFrom(this IMessage message, Stream input) =>
MergeDelimitedFrom(message, input, false, null);
/// <summary>
/// Converts the given message into a byte array in protobuf encoding.
/// </summary>
/// <param name="message">The message to convert.</param>
/// <returns>The message data as a byte array.</returns>
public static byte[] ToByteArray(this IMessage message)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
byte[] result = new byte[message.CalculateSize()];
CodedOutputStream output = new CodedOutputStream(result);
message.WriteTo(output);
output.CheckNoSpaceLeft();
return result;
}
/// <summary>
/// Writes the given message data to the given stream in protobuf encoding.
/// </summary>
/// <param name="message">The message to write to the stream.</param>
/// <param name="output">The stream to write to.</param>
public static void WriteTo(this IMessage message, Stream output)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(output, nameof(output));
CodedOutputStream codedOutput = new CodedOutputStream(output);
message.WriteTo(codedOutput);
codedOutput.Flush();
}
/// <summary>
/// Writes the length and then data of the given message to a stream.
/// </summary>
/// <param name="message">The message to write.</param>
/// <param name="output">The output stream to write to.</param>
public static void WriteDelimitedTo(this IMessage message, Stream output)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(output, nameof(output));
CodedOutputStream codedOutput = new CodedOutputStream(output);
codedOutput.WriteLength(message.CalculateSize());
message.WriteTo(codedOutput);
codedOutput.Flush();
}
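// Editor's sketch (not part of the original source): round-tripping with the delimited helpers
// above. `original` and `copy` are hypothetical message instances of the same type.
//
//   using var ms = new MemoryStream();
//   original.WriteDelimitedTo(ms);       // writes a varint length, then the payload
//   ms.Position = 0;
//   copy.MergeDelimitedFrom(ms);         // reads back exactly that payload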
/// <summary>
/// Converts the given message into a byte string in protobuf encoding.
/// </summary>
/// <param name="message">The message to convert.</param>
/// <returns>The message data as a byte string.</returns>
public static ByteString ToByteString(this IMessage message)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
return ByteString.AttachBytes(message.ToByteArray());
}
/// <summary>
/// Writes the given message data to the given buffer writer in protobuf encoding.
/// </summary>
/// <param name="message">The message to write to the stream.</param>
/// <param name="output">The stream to write to.</param>
[SecuritySafeCritical]
public static void WriteTo(this IMessage message, IBufferWriter<byte> output)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(output, nameof(output));
WriteContext.Initialize(output, out WriteContext ctx);
WritingPrimitivesMessages.WriteRawMessage(ref ctx, message);
ctx.Flush();
}
/// <summary>
/// Writes the given message data to the given span in protobuf encoding.
/// The size of the destination span needs to fit the serialized size
/// of the message exactly, otherwise an exception is thrown.
/// </summary>
/// <param name="message">The message to write to the stream.</param>
/// <param name="output">The span to write to. Size must match size of the message exactly.</param>
[SecuritySafeCritical]
public static void WriteTo(this IMessage message, Span<byte> output)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
WriteContext.Initialize(ref output, out WriteContext ctx);
WritingPrimitivesMessages.WriteRawMessage(ref ctx, message);
ctx.CheckNoSpaceLeft();
}
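// Editor's sketch (not part of the original source): sizing the destination exactly, as the
// overload above requires. `message` is a hypothetical IMessage instance.
//
//   var buffer = new byte[message.CalculateSize()];
//   message.WriteTo(buffer.AsSpan());    // throws if the span size does not match exactly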
/// <summary>
/// Checks if all required fields in a message have values set. For proto3 messages, this returns true.
/// </summary>
public static bool IsInitialized(this IMessage message)
{
if (message.Descriptor.File.Edition == Edition.Proto3)
{
return true;
}
if (!message.Descriptor.IsExtensionsInitialized(message))
{
return false;
}
return message.Descriptor
.Fields
.InDeclarationOrder()
.All(f =>
{
if (f.IsMap)
{
var valueField = f.MessageType.Fields[2];
if (valueField.FieldType == FieldType.Message)
{
var map = (IDictionary)f.Accessor.GetValue(message);
return map.Values.Cast<IMessage>().All(IsInitialized);
}
else
{
return true;
}
}
else if (f.IsRepeated && (f.FieldType == FieldType.Message || f.FieldType == FieldType.Group))
{
var enumerable = (IEnumerable)f.Accessor.GetValue(message);
return enumerable.Cast<IMessage>().All(IsInitialized);
}
else if (f.FieldType == FieldType.Message || f.FieldType == FieldType.Group)
{
if (f.Accessor.HasValue(message))
{
return ((IMessage)f.Accessor.GetValue(message)).IsInitialized();
}
else
{
return !f.IsRequired;
}
}
else if (f.IsRequired)
{
return f.Accessor.HasValue(message);
}
else
{
return true;
}
});
}
// Implementations allowing unknown fields to be discarded.
internal static void MergeFrom(this IMessage message, byte[] data, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(data, nameof(data));
CodedInputStream input = new CodedInputStream(data)
{
DiscardUnknownFields = discardUnknownFields,
ExtensionRegistry = registry
};
message.MergeFrom(input);
input.CheckReadEndOfStreamTag();
}
internal static void MergeFrom(this IMessage message, byte[] data, int offset, int length, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(data, nameof(data));
CodedInputStream input = new CodedInputStream(data, offset, length)
{
DiscardUnknownFields = discardUnknownFields,
ExtensionRegistry = registry
};
message.MergeFrom(input);
input.CheckReadEndOfStreamTag();
}
internal static void MergeFrom(this IMessage message, CodedInputStream input, byte[] data, int offset, int length, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(data, nameof(data));
input.Reset(data, offset, length, discardUnknownFields, registry);
message.MergeFrom(input);
input.CheckReadEndOfStreamTag();
}
internal static void MergeFrom(this IMessage message, ByteString data, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(data, nameof(data));
CodedInputStream input = data.CreateCodedInput();
input.DiscardUnknownFields = discardUnknownFields;
input.ExtensionRegistry = registry;
message.MergeFrom(input);
input.CheckReadEndOfStreamTag();
}
internal static void MergeFrom(this IMessage message, Stream input, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(input, nameof(input));
CodedInputStream codedInput = new CodedInputStream(input)
{
DiscardUnknownFields = discardUnknownFields,
ExtensionRegistry = registry
};
message.MergeFrom(codedInput);
codedInput.CheckReadEndOfStreamTag();
}
[SecuritySafeCritical]
internal static void MergeFrom(this IMessage message, ReadOnlySequence<byte> data, bool discardUnknownFields, ExtensionRegistry registry)
{
ParseContext.Initialize(data, out ParseContext ctx);
ctx.DiscardUnknownFields = discardUnknownFields;
ctx.ExtensionRegistry = registry;
ParsingPrimitivesMessages.ReadRawMessage(ref ctx, message);
ParsingPrimitivesMessages.CheckReadEndOfStreamTag(ref ctx.state);
}
[SecuritySafeCritical]
internal static void MergeFrom(this IMessage message, ReadOnlySpan<byte> data, bool discardUnknownFields, ExtensionRegistry registry)
{
ParseContext.Initialize(data, out ParseContext ctx);
ctx.DiscardUnknownFields = discardUnknownFields;
ctx.ExtensionRegistry = registry;
ParsingPrimitivesMessages.ReadRawMessage(ref ctx, message);
ParsingPrimitivesMessages.CheckReadEndOfStreamTag(ref ctx.state);
}
internal static void MergeDelimitedFrom(this IMessage message, Stream input, bool discardUnknownFields, ExtensionRegistry registry)
{
ProtoPreconditions.CheckNotNull(message, nameof(message));
ProtoPreconditions.CheckNotNull(input, nameof(input));
int size = (int)CodedInputStream.ReadRawVarint32(input);
Stream limitedStream = new LimitedInputStream(input, size);
MergeFrom(message, limitedStream, discardUnknownFields, registry);
}
}
}
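
The overload above that takes both a CodedInputStream and a byte[] slice is the piece that enables object reuse: the caller keeps one long-lived CodedInputStream and the extension simply re-points it at the new buffer via Reset before merging, so no stream object (or its internal buffer) is allocated per message. A minimal caller sketch follows; it assumes the Reset(data, offset, length, discardUnknownFields, registry) method used above is reachable from the calling code (in the stock library these members are internal), and NetPacket is a placeholder for any generated message type.

using System;
using Google.Protobuf;

// Sketch only: reuse a single CodedInputStream across parses to cut per-message GC.
public sealed class PacketReader
{
    // Created once and reused; Reset() re-targets it at each incoming buffer.
    private readonly CodedInputStream cachedInput = new CodedInputStream(Array.Empty<byte>());

    public void Decode(NetPacket target, byte[] payload, int offset, int length)
    {
        // Mirrors the internal extension above: reset state, merge, then verify
        // that the whole slice was consumed.
        cachedInput.Reset(payload, offset, length, false, null); // (data, offset, length, discardUnknownFields, registry)
        target.MergeFrom(cachedInput);
        cachedInput.CheckReadEndOfStreamTag();
    }
}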

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 21e36c25ae9776b4e90fba4c2f6ed3eb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,388 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Buffers;
using System.IO;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// A general message parser, typically used by reflection-based code as all the methods
/// return simple <see cref="IMessage"/>.
/// </summary>
public class MessageParser
{
private readonly Func<IMessage> factory;
private protected bool DiscardUnknownFields { get; }
internal ExtensionRegistry Extensions { get; }
internal MessageParser(Func<IMessage> factory, bool discardUnknownFields, ExtensionRegistry extensions)
{
this.factory = factory;
DiscardUnknownFields = discardUnknownFields;
Extensions = extensions;
}
/// <summary>
/// Creates a template instance ready for population.
/// </summary>
/// <returns>An empty message.</returns>
internal IMessage CreateTemplate()
{
return factory();
}
/// <summary>
/// Parses a message from a byte array.
/// </summary>
/// <param name="data">The byte array containing the message. Must not be null.</param>
/// <returns>The newly parsed message.</returns>
public IMessage ParseFrom(byte[] data)
{
IMessage message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from a byte array slice.
/// </summary>
/// <param name="data">The byte array containing the message. Must not be null.</param>
/// <param name="offset">The offset of the slice to parse.</param>
/// <param name="length">The length of the slice to parse.</param>
/// <returns>The newly parsed message.</returns>
public IMessage ParseFrom(byte[] data, int offset, int length)
{
IMessage message = factory();
message.MergeFrom(data, offset, length, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given byte string.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
public IMessage ParseFrom(ByteString data)
{
IMessage message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given stream.
/// </summary>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public IMessage ParseFrom(Stream input)
{
IMessage message = factory();
message.MergeFrom(input, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given sequence.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
[SecuritySafeCritical]
public IMessage ParseFrom(ReadOnlySequence<byte> data)
{
IMessage message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given span.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
[SecuritySafeCritical]
public IMessage ParseFrom(ReadOnlySpan<byte> data)
{
IMessage message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a length-delimited message from the given stream.
/// </summary>
/// <remarks>
/// The stream is expected to contain a length and then the data. Only the amount of data
/// specified by the length will be consumed.
/// </remarks>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public IMessage ParseDelimitedFrom(Stream input)
{
IMessage message = factory();
message.MergeDelimitedFrom(input, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given coded input stream.
/// </summary>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public IMessage ParseFrom(CodedInputStream input)
{
IMessage message = factory();
MergeFrom(message, input);
return message;
}
/// <summary>
/// Parses a message from the given JSON.
/// </summary>
/// <remarks>This method always uses the default JSON parser; it is not affected by <see cref="WithDiscardUnknownFields(bool)"/>.
/// To ignore unknown fields when parsing JSON, create a <see cref="JsonParser"/> using a <see cref="JsonParser.Settings"/>
/// with <see cref="JsonParser.Settings.IgnoreUnknownFields"/> set to true and call <see cref="JsonParser.Parse{T}(string)"/> directly.
/// </remarks>
/// <param name="json">The JSON to parse.</param>
/// <returns>The parsed message.</returns>
/// <exception cref="InvalidJsonException">The JSON does not comply with RFC 7159</exception>
/// <exception cref="InvalidProtocolBufferException">The JSON does not represent a Protocol Buffers message correctly</exception>
public IMessage ParseJson(string json)
{
IMessage message = factory();
JsonParser.Default.Merge(message, json);
return message;
}
// TODO: When we're using a C# 7.1 compiler, make this private protected.
internal void MergeFrom(IMessage message, CodedInputStream codedInput)
{
bool originalDiscard = codedInput.DiscardUnknownFields;
ExtensionRegistry originalRegistry = codedInput.ExtensionRegistry;
try
{
codedInput.DiscardUnknownFields = DiscardUnknownFields;
codedInput.ExtensionRegistry = Extensions;
message.MergeFrom(codedInput);
}
finally
{
codedInput.DiscardUnknownFields = originalDiscard;
codedInput.ExtensionRegistry = originalRegistry;
}
}
/// <summary>
/// Creates a new message parser which optionally discards unknown fields when parsing.
/// </summary>
/// <remarks>Note that this does not affect the behavior of <see cref="ParseJson(string)"/>
/// at all. To ignore unknown fields when parsing JSON, create a <see cref="JsonParser"/> using a <see cref="JsonParser.Settings"/>
/// with <see cref="JsonParser.Settings.IgnoreUnknownFields"/> set to true and call <see cref="JsonParser.Parse{T}(string)"/> directly.</remarks>
/// <param name="discardUnknownFields">Whether or not to discard unknown fields when parsing.</param>
/// <returns>A newly configured message parser.</returns>
public MessageParser WithDiscardUnknownFields(bool discardUnknownFields) =>
new MessageParser(factory, discardUnknownFields, Extensions);
/// <summary>
/// Creates a new message parser which registers extensions from the specified registry upon creating the message instance
/// </summary>
/// <param name="registry">The extensions to register</param>
/// <returns>A newly configured message parser.</returns>
public MessageParser WithExtensionRegistry(ExtensionRegistry registry) =>
new MessageParser(factory, DiscardUnknownFields, registry);
}
/// <summary>
/// A parser for a specific message type.
/// </summary>
/// <remarks>
/// <p>
/// This delegates most behavior to the
/// <see cref="IMessage.MergeFrom"/> implementation within the original type, but
/// provides convenient overloads to parse from a variety of sources.
/// </p>
/// <p>
/// Most applications will never need to create their own instances of this type;
/// instead, use the static <c>Parser</c> property of a generated message type to obtain a
/// parser for that type.
/// </p>
/// </remarks>
/// <typeparam name="T">The type of message to be parsed.</typeparam>
public sealed class MessageParser<T> : MessageParser where T : IMessage<T>
{
// Implementation note: all the methods here *could* just delegate up to the base class and cast the result.
// The current implementation avoids a virtual method call and a cast, which *may* be significant in some cases.
// Benchmarking work is required to measure the significance - but it's only a few lines of code in any case.
// The API wouldn't change anyway - just the implementation - so this work can be deferred.
private readonly Func<T> factory;
/// <summary>
/// Creates a new parser.
/// </summary>
/// <remarks>
/// The factory method is effectively an optimization over using a generic constraint
/// to require a parameterless constructor: delegates are significantly faster to execute.
/// </remarks>
/// <param name="factory">Function to invoke when a new, empty message is required.</param>
public MessageParser(Func<T> factory) : this(factory, false, null)
{
}
internal MessageParser(Func<T> factory, bool discardUnknownFields, ExtensionRegistry extensions) : base(() => factory(), discardUnknownFields, extensions)
{
this.factory = factory;
}
/// <summary>
/// Creates a template instance ready for population.
/// </summary>
/// <returns>An empty message.</returns>
internal new T CreateTemplate()
{
return factory();
}
/// <summary>
/// Parses a message from a byte array.
/// </summary>
/// <param name="data">The byte array containing the message. Must not be null.</param>
/// <returns>The newly parsed message.</returns>
public new T ParseFrom(byte[] data)
{
T message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from a byte array slice.
/// </summary>
/// <param name="data">The byte array containing the message. Must not be null.</param>
/// <param name="offset">The offset of the slice to parse.</param>
/// <param name="length">The length of the slice to parse.</param>
/// <returns>The newly parsed message.</returns>
public new T ParseFrom(byte[] data, int offset, int length)
{
T message = factory();
message.MergeFrom(data, offset, length, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given byte string.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
public new T ParseFrom(ByteString data)
{
T message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given stream.
/// </summary>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public new T ParseFrom(Stream input)
{
T message = factory();
message.MergeFrom(input, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given sequence.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
[SecuritySafeCritical]
public new T ParseFrom(ReadOnlySequence<byte> data)
{
T message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given span.
/// </summary>
/// <param name="data">The data to parse.</param>
/// <returns>The parsed message.</returns>
[SecuritySafeCritical]
public new T ParseFrom(ReadOnlySpan<byte> data)
{
T message = factory();
message.MergeFrom(data, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a length-delimited message from the given stream.
/// </summary>
/// <remarks>
/// The stream is expected to contain a length and then the data. Only the amount of data
/// specified by the length will be consumed.
/// </remarks>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public new T ParseDelimitedFrom(Stream input)
{
T message = factory();
message.MergeDelimitedFrom(input, DiscardUnknownFields, Extensions);
return message;
}
/// <summary>
/// Parses a message from the given coded input stream.
/// </summary>
/// <param name="input">The stream to parse.</param>
/// <returns>The parsed message.</returns>
public new T ParseFrom(CodedInputStream input)
{
T message = factory();
MergeFrom(message, input);
return message;
}
/// <summary>
/// Parses a message from the given JSON.
/// </summary>
/// <param name="json">The JSON to parse.</param>
/// <returns>The parsed message.</returns>
/// <exception cref="InvalidJsonException">The JSON does not comply with RFC 7159</exception>
/// <exception cref="InvalidProtocolBufferException">The JSON does not represent a Protocol Buffers message correctly</exception>
public new T ParseJson(string json)
{
T message = factory();
JsonParser.Default.Merge(message, json);
return message;
}
/// <summary>
/// Creates a new message parser which optionally discards unknown fields when parsing.
/// </summary>
/// <param name="discardUnknownFields">Whether or not to discard unknown fields when parsing.</param>
/// <returns>A newly configured message parser.</returns>
public new MessageParser<T> WithDiscardUnknownFields(bool discardUnknownFields) =>
new MessageParser<T>(factory, discardUnknownFields, Extensions);
/// <summary>
/// Creates a new message parser which registers extensions from the specified registry upon creating the message instance
/// </summary>
/// <param name="registry">The extensions to register</param>
/// <returns>A newly configured message parser.</returns>
public new MessageParser<T> WithExtensionRegistry(ExtensionRegistry registry) =>
new MessageParser<T>(factory, DiscardUnknownFields, registry);
}
}
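
For comparison with the CodedInputStream-reuse path, typical application code goes through the generated static Parser property, which is an instance of the MessageParser&lt;T&gt; above. A short usage sketch, where MyMessage is a hypothetical generated message type:

using Google.Protobuf;

static class ParserUsageSketch
{
    // "MyMessage" is a placeholder for any generated message type exposing the
    // standard static Parser property.
    static MyMessage Parse(byte[] payload)
    {
        // Parsers are immutable; the With* methods return a reconfigured copy,
        // here one that drops unknown fields instead of keeping them on the message.
        MessageParser<MyMessage> lenient = MyMessage.Parser.WithDiscardUnknownFields(true);

        // The whole-array and slice overloads mirror the MergeFrom extensions above.
        return lenient.ParseFrom(payload, 0, payload.Length);
    }
}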

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6d71cc2c43a7ec447a0133fdba29db40
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,42 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
/// <summary>
/// Struct used to hold the keys for the fieldByNumber table in DescriptorPool and the keys for the
/// extensionByNumber table in ExtensionRegistry.
/// </summary>
internal struct ObjectIntPair<T> : IEquatable<ObjectIntPair<T>> where T : class
{
private readonly int number;
private readonly T obj;
internal ObjectIntPair(T obj, int number)
{
this.number = number;
this.obj = obj;
}
public bool Equals(ObjectIntPair<T> other)
{
return obj == other.obj
&& number == other.number;
}
public override bool Equals(object obj) => obj is ObjectIntPair<T> pair && Equals(pair);
public override int GetHashCode()
{
return obj.GetHashCode() * ((1 << 16) - 1) + number;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 05d3b3a4013f0bd40b93830291806f1c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,267 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Buffers;
using System.Runtime.CompilerServices;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// An opaque struct that represents the current parsing state and is passed along
/// as the parsing proceeds.
/// All the public methods are intended to be invoked only by the generated code,
/// users should never invoke them directly.
/// </summary>
[SecuritySafeCritical]
public ref struct ParseContext
{
internal const int DefaultRecursionLimit = 100;
internal const int DefaultSizeLimit = int.MaxValue;
internal ReadOnlySpan<byte> buffer;
internal ParserInternalState state;
/// <summary>
/// Initialize a <see cref="ParseContext"/>, building all <see cref="ParserInternalState"/> from defaults and
/// the given <paramref name="buffer"/>.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Initialize(ReadOnlySpan<byte> buffer, out ParseContext ctx)
{
ParserInternalState state = default;
state.sizeLimit = DefaultSizeLimit;
state.recursionLimit = DefaultRecursionLimit;
state.currentLimit = int.MaxValue;
state.bufferSize = buffer.Length;
// Equivalent to Initialize(buffer, ref state, out ctx);
ctx.buffer = buffer;
ctx.state = state;
}
/// <summary>
/// Initialize a <see cref="ParseContext"/> using existing <see cref="ParserInternalState"/>, e.g. from <see cref="CodedInputStream"/>.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Initialize(ReadOnlySpan<byte> buffer, ref ParserInternalState state, out ParseContext ctx)
{
// Note: if this code ever changes, also change the initialization above.
ctx.buffer = buffer;
ctx.state = state;
}
/// <summary>
/// Creates a ParseContext instance from CodedInputStream.
/// WARNING: internally this copies the CodedInputStream's state, so after done with the ParseContext,
/// the CodedInputStream's state needs to be updated.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Initialize(CodedInputStream input, out ParseContext ctx)
{
ctx.buffer = new ReadOnlySpan<byte>(input.InternalBuffer);
// ideally we would use a reference to the original state, but that doesn't seem possible
// so we just copy the struct that holds the state. We will need to later store the state back
// into CodedInputStream if we want to keep it usable.
ctx.state = input.InternalState;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Initialize(ReadOnlySequence<byte> input, out ParseContext ctx)
{
Initialize(input, DefaultRecursionLimit, out ctx);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Initialize(ReadOnlySequence<byte> input, int recursionLimit, out ParseContext ctx)
{
ctx.buffer = default;
ctx.state = default;
ctx.state.lastTag = 0;
ctx.state.recursionDepth = 0;
ctx.state.sizeLimit = DefaultSizeLimit;
ctx.state.recursionLimit = recursionLimit;
ctx.state.currentLimit = int.MaxValue;
SegmentedBufferHelper.Initialize(input, out ctx.state.segmentedBufferHelper, out ctx.buffer);
ctx.state.bufferPos = 0;
ctx.state.bufferSize = ctx.buffer.Length;
ctx.state.DiscardUnknownFields = false;
ctx.state.ExtensionRegistry = null;
}
/// <summary>
/// Returns the last tag read, or 0 if no tags have been read or we've read beyond
/// the end of the input.
/// </summary>
internal uint LastTag => state.lastTag;
/// <summary>
/// Internal-only property; when set to true, unknown fields will be discarded while parsing.
/// </summary>
internal bool DiscardUnknownFields
{
get => state.DiscardUnknownFields;
set => state.DiscardUnknownFields = value;
}
/// <summary>
/// Internal-only property; provides extension identifiers to compatible messages while parsing.
/// </summary>
internal ExtensionRegistry ExtensionRegistry
{
get => state.ExtensionRegistry;
set => state.ExtensionRegistry = value;
}
/// <summary>
/// Reads a field tag, returning the tag of 0 for "end of input".
/// </summary>
/// <remarks>
/// If this method returns 0, it doesn't necessarily mean the end of all
/// the data in this CodedInputReader; it may be the end of the logical input
/// for an embedded message, for example.
/// </remarks>
/// <returns>The next field tag, or 0 for end of input. (0 is never a valid tag.)</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public uint ReadTag() => ParsingPrimitives.ParseTag(ref buffer, ref state);
/// <summary>
/// Reads a double field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public double ReadDouble() => ParsingPrimitives.ParseDouble(ref buffer, ref state);
/// <summary>
/// Reads a float field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public float ReadFloat() => ParsingPrimitives.ParseFloat(ref buffer, ref state);
/// <summary>
/// Reads a uint64 field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ulong ReadUInt64() => ParsingPrimitives.ParseRawVarint64(ref buffer, ref state);
/// <summary>
/// Reads an int64 field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public long ReadInt64() => (long)ParsingPrimitives.ParseRawVarint64(ref buffer, ref state);
/// <summary>
/// Reads an int32 field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int ReadInt32() => (int)ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
/// <summary>
/// Reads a fixed64 field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ulong ReadFixed64() => ParsingPrimitives.ParseRawLittleEndian64(ref buffer, ref state);
/// <summary>
/// Reads a fixed32 field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public uint ReadFixed32() => ParsingPrimitives.ParseRawLittleEndian32(ref buffer, ref state);
/// <summary>
/// Reads a bool field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool ReadBool() => ParsingPrimitives.ParseRawVarint64(ref buffer, ref state) != 0;
/// <summary>
/// Reads a string field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public string ReadString() => ParsingPrimitives.ReadString(ref buffer, ref state);
/// <summary>
/// Reads an embedded message field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void ReadMessage(IMessage message) => ParsingPrimitivesMessages.ReadMessage(ref this, message);
/// <summary>
/// Reads an embedded group field from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void ReadGroup(IMessage message) => ParsingPrimitivesMessages.ReadGroup(ref this, message);
/// <summary>
/// Reads a bytes field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ByteString ReadBytes() => ParsingPrimitives.ReadBytes(ref buffer, ref state);
/// <summary>
/// Reads a uint32 field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public uint ReadUInt32() => ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
/// <summary>
/// Reads an enum field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int ReadEnum()
{
            // Currently just a pass-through, but it's nice to separate it logically from ReadInt32.
return (int)ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
}
/// <summary>
/// Reads an sfixed32 field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int ReadSFixed32() => (int)ParsingPrimitives.ParseRawLittleEndian32(ref buffer, ref state);
/// <summary>
/// Reads an sfixed64 field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public long ReadSFixed64() => (long)ParsingPrimitives.ParseRawLittleEndian64(ref buffer, ref state);
/// <summary>
/// Reads an sint32 field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int ReadSInt32() => ParsingPrimitives.DecodeZigZag32(ParsingPrimitives.ParseRawVarint32(ref buffer, ref state));
/// <summary>
/// Reads an sint64 field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public long ReadSInt64() => ParsingPrimitives.DecodeZigZag64(ParsingPrimitives.ParseRawVarint64(ref buffer, ref state));
/// <summary>
/// Reads a length for length-delimited data.
/// </summary>
/// <remarks>
/// This is internally just reading a varint, but this method exists
/// to make the calling code clearer.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int ReadLength() => (int)ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
internal void CopyStateTo(CodedInputStream input)
{
input.InternalState = state;
}
internal void LoadStateFrom(CodedInputStream input)
{
state = input.InternalState;
}
}
}
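
ParseContext instances are created by the library plumbing above (Initialize is internal) and handed to generated IBufferMessage.InternalMergeFrom implementations, which drive the Read* methods in a tag-dispatch loop. A hand-written sketch of that loop for a hypothetical message with field 1 = int32 and field 2 = string (real generated code also handles packed fields and keeps unknown fields on the message; UnknownFieldSet.MergeFieldFrom with a ref ParseContext is assumed to be available as in current Google.Protobuf):

using Google.Protobuf;

internal static class ParseContextSketch
{
    // Illustrative only; a context would be supplied by the parsing pipeline.
    internal static (int Id, string Name) ReadSampleFields(ref ParseContext ctx)
    {
        int id = 0;
        string name = "";
        uint tag;
        while ((tag = ctx.ReadTag()) != 0)               // 0 means end of this message's input
        {
            switch (tag)
            {
                case 8:  id = ctx.ReadInt32(); break;    // field 1, varint wire type
                case 18: name = ctx.ReadString(); break; // field 2, length-delimited wire type
                default:
                    // Consume the unknown field so the stream stays in sync; generated
                    // code would keep the returned set, this sketch discards it.
                    UnknownFieldSet.MergeFieldFrom(null, ref ctx);
                    break;
            }
        }
        return (id, name);
    }
}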

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 88566e0590b24674796b6cf6437074cd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,80 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
namespace Google.Protobuf
{
// warning: this is a mutable struct, so it needs to be only passed as a ref!
internal struct ParserInternalState
{
// NOTE: the Span representing the current buffer is kept separate so that this doesn't have to be a ref struct and so it can
// be included in CodedInputStream's internal state
/// <summary>
/// The position within the current buffer (i.e. the next byte to read)
/// </summary>
internal int bufferPos;
/// <summary>
/// Size of the current buffer
/// </summary>
internal int bufferSize;
/// <summary>
/// If we are currently inside a length-delimited block, this is the number of
/// bytes in the buffer that are still available once we leave the delimited block.
/// </summary>
internal int bufferSizeAfterLimit;
/// <summary>
/// The absolute position of the end of the current length-delimited block (including totalBytesRetired)
/// </summary>
internal int currentLimit;
/// <summary>
        /// The total number of bytes consumed before the start of the current buffer. The
/// total bytes read up to the current position can be computed as
/// totalBytesRetired + bufferPos.
/// </summary>
internal int totalBytesRetired;
internal int recursionDepth; // current recursion depth
internal SegmentedBufferHelper segmentedBufferHelper;
/// <summary>
/// The last tag we read. 0 indicates we've read to the end of the stream
/// (or haven't read anything yet).
/// </summary>
internal uint lastTag;
/// <summary>
/// The next tag, used to store the value read by PeekTag.
/// </summary>
internal uint nextTag;
internal bool hasNextTag;
// these fields are configuration, they should be readonly
internal int sizeLimit;
internal int recursionLimit;
// If non-null, the top level parse method was started with given coded input stream as an argument
// which also means we can potentially fallback to calling MergeFrom(CodedInputStream cis) if needed.
internal CodedInputStream CodedInputStream => segmentedBufferHelper.CodedInputStream;
/// <summary>
/// Internal-only property; when set to true, unknown fields will be discarded while parsing.
/// </summary>
internal bool DiscardUnknownFields { get; set; }
/// <summary>
/// Internal-only property; provides extension identifiers to compatible messages while parsing.
/// </summary>
internal ExtensionRegistry ExtensionRegistry { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 40ba41373ca7a574aa07ee5da3f4107e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,789 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Primitives for parsing protobuf wire format.
/// </summary>
[SecuritySafeCritical]
internal static class ParsingPrimitives
{
private const int StackallocThreshold = 256;
/// <summary>
/// Reads a length for length-delimited data.
/// </summary>
/// <remarks>
/// This is internally just reading a varint, but this method exists
/// to make the calling code clearer.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ParseLength(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
return (int)ParseRawVarint32(ref buffer, ref state);
}
/// <summary>
/// Parses the next tag.
/// If the end of logical stream was reached, an invalid tag of 0 is returned.
/// </summary>
public static uint ParseTag(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// The "nextTag" logic is there only as an optimization for reading non-packed repeated / map
// fields and is strictly speaking not necessary.
// TODO: look into simplifying the ParseTag logic.
if (state.hasNextTag)
{
state.lastTag = state.nextTag;
state.hasNextTag = false;
return state.lastTag;
}
// Optimize for the incredibly common case of having at least two bytes left in the buffer,
// and those two bytes being enough to get the tag. This will be true for fields up to 4095.
if (state.bufferPos + 2 <= state.bufferSize)
{
int tmp = buffer[state.bufferPos++];
if (tmp < 128)
{
state.lastTag = (uint)tmp;
}
else
{
int result = tmp & 0x7f;
if ((tmp = buffer[state.bufferPos++]) < 128)
{
result |= tmp << 7;
state.lastTag = (uint) result;
}
else
{
// Nope, rewind and go the potentially slow route.
state.bufferPos -= 2;
state.lastTag = ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
}
}
}
else
{
if (SegmentedBufferHelper.IsAtEnd(ref buffer, ref state))
{
state.lastTag = 0;
return 0;
}
state.lastTag = ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
}
if (WireFormat.GetTagFieldNumber(state.lastTag) == 0)
{
// If we actually read a tag with a field of 0, that's not a valid tag.
throw InvalidProtocolBufferException.InvalidTag();
}
return state.lastTag;
}
/// <summary>
/// Peeks at the next tag in the stream. If it matches <paramref name="tag"/>,
/// the tag is consumed and the method returns <c>true</c>; otherwise, the
/// stream is left in the original position and the method returns <c>false</c>.
/// </summary>
public static bool MaybeConsumeTag(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, uint tag)
{
if (PeekTag(ref buffer, ref state) == tag)
{
state.hasNextTag = false;
return true;
}
return false;
}
/// <summary>
/// Peeks at the next field tag. This is like calling <see cref="ParseTag"/>, but the
/// tag is not consumed. (So a subsequent call to <see cref="ParseTag"/> will return the
/// same value.)
/// </summary>
public static uint PeekTag(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
if (state.hasNextTag)
{
return state.nextTag;
}
uint savedLast = state.lastTag;
state.nextTag = ParseTag(ref buffer, ref state);
state.hasNextTag = true;
state.lastTag = savedLast; // Undo the side effect of ReadTag
return state.nextTag;
}
/// <summary>
/// Parses a raw varint.
/// </summary>
public static ulong ParseRawVarint64(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
if (state.bufferPos + 10 > state.bufferSize)
{
return ParseRawVarint64SlowPath(ref buffer, ref state);
}
ulong result = buffer[state.bufferPos++];
if (result < 128)
{
return result;
}
result &= 0x7f;
int shift = 7;
do
{
byte b = buffer[state.bufferPos++];
result |= (ulong)(b & 0x7F) << shift;
if (b < 0x80)
{
return result;
}
shift += 7;
}
while (shift < 64);
throw InvalidProtocolBufferException.MalformedVarint();
}
private static ulong ParseRawVarint64SlowPath(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int shift = 0;
ulong result = 0;
do
{
byte b = ReadRawByte(ref buffer, ref state);
result |= (ulong)(b & 0x7F) << shift;
if (b < 0x80)
{
return result;
}
shift += 7;
}
while (shift < 64);
throw InvalidProtocolBufferException.MalformedVarint();
}
/// <summary>
/// Parses a raw Varint. If larger than 32 bits, discard the upper bits.
/// This method is optimised for the case where we've got lots of data in the buffer.
/// That means we can check the size just once, then just read directly from the buffer
/// without constant rechecking of the buffer length.
/// </summary>
public static uint ParseRawVarint32(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
if (state.bufferPos + 5 > state.bufferSize)
{
return ParseRawVarint32SlowPath(ref buffer, ref state);
}
int tmp = buffer[state.bufferPos++];
if (tmp < 128)
{
return (uint)tmp;
}
int result = tmp & 0x7f;
if ((tmp = buffer[state.bufferPos++]) < 128)
{
result |= tmp << 7;
}
else
{
result |= (tmp & 0x7f) << 7;
if ((tmp = buffer[state.bufferPos++]) < 128)
{
result |= tmp << 14;
}
else
{
result |= (tmp & 0x7f) << 14;
if ((tmp = buffer[state.bufferPos++]) < 128)
{
result |= tmp << 21;
}
else
{
result |= (tmp & 0x7f) << 21;
result |= (tmp = buffer[state.bufferPos++]) << 28;
if (tmp >= 128)
{
// Discard upper 32 bits.
// Note that this has to use ReadRawByte() as we only ensure we've
// got at least 5 bytes at the start of the method. This lets us
// use the fast path in more cases, and we rarely hit this section of code.
for (int i = 0; i < 5; i++)
{
if (ReadRawByte(ref buffer, ref state) < 128)
{
return (uint) result;
}
}
throw InvalidProtocolBufferException.MalformedVarint();
}
}
}
}
return (uint)result;
}
private static uint ParseRawVarint32SlowPath(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int tmp = ReadRawByte(ref buffer, ref state);
if (tmp < 128)
{
return (uint) tmp;
}
int result = tmp & 0x7f;
if ((tmp = ReadRawByte(ref buffer, ref state)) < 128)
{
result |= tmp << 7;
}
else
{
result |= (tmp & 0x7f) << 7;
if ((tmp = ReadRawByte(ref buffer, ref state)) < 128)
{
result |= tmp << 14;
}
else
{
result |= (tmp & 0x7f) << 14;
if ((tmp = ReadRawByte(ref buffer, ref state)) < 128)
{
result |= tmp << 21;
}
else
{
result |= (tmp & 0x7f) << 21;
result |= (tmp = ReadRawByte(ref buffer, ref state)) << 28;
if (tmp >= 128)
{
// Discard upper 32 bits.
for (int i = 0; i < 5; i++)
{
if (ReadRawByte(ref buffer, ref state) < 128)
{
return (uint) result;
}
}
throw InvalidProtocolBufferException.MalformedVarint();
}
}
}
}
return (uint) result;
}
/// <summary>
/// Parses a 32-bit little-endian integer.
/// </summary>
public static uint ParseRawLittleEndian32(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
const int uintLength = sizeof(uint);
const int ulongLength = sizeof(ulong);
if (state.bufferPos + ulongLength > state.bufferSize)
{
return ParseRawLittleEndian32SlowPath(ref buffer, ref state);
}
// ReadUInt32LittleEndian is many times slower than ReadUInt64LittleEndian (at least on some runtimes)
            // so it's faster to use ReadUInt64LittleEndian and truncate the result.
uint result = (uint) BinaryPrimitives.ReadUInt64LittleEndian(buffer.Slice(state.bufferPos, ulongLength));
state.bufferPos += uintLength;
return result;
}
private static uint ParseRawLittleEndian32SlowPath(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
uint b1 = ReadRawByte(ref buffer, ref state);
uint b2 = ReadRawByte(ref buffer, ref state);
uint b3 = ReadRawByte(ref buffer, ref state);
uint b4 = ReadRawByte(ref buffer, ref state);
return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24);
}
/// <summary>
/// Parses a 64-bit little-endian integer.
/// </summary>
public static ulong ParseRawLittleEndian64(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
const int length = sizeof(ulong);
if (state.bufferPos + length > state.bufferSize)
{
return ParseRawLittleEndian64SlowPath(ref buffer, ref state);
}
ulong result = BinaryPrimitives.ReadUInt64LittleEndian(buffer.Slice(state.bufferPos, length));
state.bufferPos += length;
return result;
}
private static ulong ParseRawLittleEndian64SlowPath(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
ulong b1 = ReadRawByte(ref buffer, ref state);
ulong b2 = ReadRawByte(ref buffer, ref state);
ulong b3 = ReadRawByte(ref buffer, ref state);
ulong b4 = ReadRawByte(ref buffer, ref state);
ulong b5 = ReadRawByte(ref buffer, ref state);
ulong b6 = ReadRawByte(ref buffer, ref state);
ulong b7 = ReadRawByte(ref buffer, ref state);
ulong b8 = ReadRawByte(ref buffer, ref state);
return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24)
| (b5 << 32) | (b6 << 40) | (b7 << 48) | (b8 << 56);
}
/// <summary>
/// Parses a double value.
/// </summary>
public static double ParseDouble(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
const int length = sizeof(double);
if (!BitConverter.IsLittleEndian || state.bufferPos + length > state.bufferSize)
{
return BitConverter.Int64BitsToDouble((long)ParseRawLittleEndian64(ref buffer, ref state));
}
// ReadUnaligned uses processor architecture for endianness.
double result = Unsafe.ReadUnaligned<double>(ref MemoryMarshal.GetReference(buffer.Slice(state.bufferPos, length)));
state.bufferPos += length;
return result;
}
/// <summary>
/// Parses a float value.
/// </summary>
public static float ParseFloat(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
const int length = sizeof(float);
if (!BitConverter.IsLittleEndian || state.bufferPos + length > state.bufferSize)
{
return ParseFloatSlow(ref buffer, ref state);
}
// ReadUnaligned uses processor architecture for endianness.
float result = Unsafe.ReadUnaligned<float>(ref MemoryMarshal.GetReference(buffer.Slice(state.bufferPos, length)));
state.bufferPos += length;
return result;
}
private static unsafe float ParseFloatSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
const int length = sizeof(float);
byte* stackBuffer = stackalloc byte[length];
Span<byte> tempSpan = new Span<byte>(stackBuffer, length);
for (int i = 0; i < length; i++)
{
tempSpan[i] = ReadRawByte(ref buffer, ref state);
}
// Content is little endian. Reverse if needed to match endianness of architecture.
if (!BitConverter.IsLittleEndian)
{
tempSpan.Reverse();
}
return Unsafe.ReadUnaligned<float>(ref MemoryMarshal.GetReference(tempSpan));
}
/// <summary>
/// Reads a fixed size of bytes from the input.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">
/// the end of the stream or the current limit was reached
/// </exception>
public static byte[] ReadRawBytes(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int size)
{
if (size < 0)
{
throw InvalidProtocolBufferException.NegativeSize();
}
if (size <= state.bufferSize - state.bufferPos)
{
// We have all the bytes we need already.
byte[] bytes = new byte[size];
buffer.Slice(state.bufferPos, size).CopyTo(bytes);
state.bufferPos += size;
return bytes;
}
return ReadRawBytesSlow(ref buffer, ref state, size);
}
private static byte[] ReadRawBytesSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int size)
{
ValidateCurrentLimit(ref buffer, ref state, size);
if ((!state.segmentedBufferHelper.TotalLength.HasValue && size < buffer.Length) ||
IsDataAvailableInSource(ref state, size))
{
// Reading more bytes than are in the buffer, but not an excessive number
// of bytes. We can safely allocate the resulting array ahead of time.
byte[] bytes = new byte[size];
ReadRawBytesIntoSpan(ref buffer, ref state, size, bytes);
return bytes;
}
else
{
// The size is very large. For security reasons, we can't allocate the
// entire byte array yet. The size comes directly from the input, so a
// maliciously-crafted message could provide a bogus very large size in
// order to trick the app into allocating a lot of memory. We avoid this
// by allocating and reading only a small chunk at a time, so that the
// malicious message must actually *be* extremely large to cause
// problems. Meanwhile, we limit the allowed size of a message elsewhere.
List<byte[]> chunks = new List<byte[]>();
int pos = state.bufferSize - state.bufferPos;
byte[] firstChunk = new byte[pos];
buffer.Slice(state.bufferPos, pos).CopyTo(firstChunk);
chunks.Add(firstChunk);
state.bufferPos = state.bufferSize;
// Read all the rest of the bytes we need.
int sizeLeft = size - pos;
while (sizeLeft > 0)
{
state.segmentedBufferHelper.RefillBuffer(ref buffer, ref state, true);
byte[] chunk = new byte[Math.Min(sizeLeft, state.bufferSize)];
buffer.Slice(0, chunk.Length)
.CopyTo(chunk);
state.bufferPos += chunk.Length;
sizeLeft -= chunk.Length;
chunks.Add(chunk);
}
// OK, got everything. Now concatenate it all into one buffer.
byte[] bytes = new byte[size];
int newPos = 0;
foreach (byte[] chunk in chunks)
{
Buffer.BlockCopy(chunk, 0, bytes, newPos, chunk.Length);
newPos += chunk.Length;
}
// Done.
return bytes;
}
}
/// <summary>
/// Reads and discards <paramref name="size"/> bytes.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">the end of the stream
/// or the current limit was reached</exception>
public static void SkipRawBytes(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int size)
{
if (size < 0)
{
throw InvalidProtocolBufferException.NegativeSize();
}
ValidateCurrentLimit(ref buffer, ref state, size);
if (size <= state.bufferSize - state.bufferPos)
{
// We have all the bytes we need already.
state.bufferPos += size;
}
else
{
// Skipping more bytes than are in the buffer. First skip what we have.
int pos = state.bufferSize - state.bufferPos;
state.bufferPos = state.bufferSize;
// TODO: If our segmented buffer is backed by a Stream that is seekable, we could skip the bytes more efficiently
// by simply updating stream's Position property. This used to be supported in the past, but the support was dropped
// because it would make the segmentedBufferHelper more complex. Support can be reintroduced if needed.
state.segmentedBufferHelper.RefillBuffer(ref buffer, ref state, true);
while (size - pos > state.bufferSize)
{
pos += state.bufferSize;
state.bufferPos = state.bufferSize;
state.segmentedBufferHelper.RefillBuffer(ref buffer, ref state, true);
}
state.bufferPos = size - pos;
}
}
/// <summary>
/// Reads a string field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string ReadString(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
return ParsingPrimitives.ReadRawString(ref buffer, ref state, length);
}
/// <summary>
/// Reads a bytes field value from the input.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static ByteString ReadBytes(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
return ByteString.AttachBytes(ParsingPrimitives.ReadRawBytes(ref buffer, ref state, length));
}
/// <summary>
/// Reads a UTF-8 string from the next "length" bytes.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">
/// the end of the stream or the current limit was reached
/// </exception>
[SecuritySafeCritical]
public static string ReadRawString(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int length)
{
// No need to read any data for an empty string.
if (length == 0)
{
return string.Empty;
}
if (length < 0)
{
throw InvalidProtocolBufferException.NegativeSize();
}
#if GOOGLE_PROTOBUF_SUPPORT_FAST_STRING
if (length <= state.bufferSize - state.bufferPos)
{
// Fast path: all bytes to decode appear in the same span.
ReadOnlySpan<byte> data = buffer.Slice(state.bufferPos, length);
string value;
unsafe
{
fixed (byte* sourceBytes = &MemoryMarshal.GetReference(data))
{
value = WritingPrimitives.Utf8Encoding.GetString(sourceBytes, length);
}
}
state.bufferPos += length;
return value;
}
#endif
return ReadStringSlow(ref buffer, ref state, length);
}
/// <summary>
/// Reads a string assuming that it is spread across multiple spans in a <see cref="ReadOnlySequence{T}"/>.
/// </summary>
private static string ReadStringSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int length)
{
ValidateCurrentLimit(ref buffer, ref state, length);
#if GOOGLE_PROTOBUF_SUPPORT_FAST_STRING
if (IsDataAvailable(ref state, length))
{
// Read string data into a temporary buffer, either stackalloc'ed or from ArrayPool
// Once all data is read then call Encoding.GetString on buffer and return to pool if needed.
byte[] byteArray = null;
Span<byte> byteSpan = length <= StackallocThreshold ?
stackalloc byte[length] :
(byteArray = ArrayPool<byte>.Shared.Rent(length));
try
{
unsafe
{
fixed (byte* pByteSpan = &MemoryMarshal.GetReference(byteSpan))
{
// Compiler doesn't like that a potentially stackalloc'd Span<byte> is being used
// in a method with a "ref Span<byte> buffer" argument. If the stackalloc'd span was assigned
// to the ref argument then bad things would happen. We'll never do that so it is ok.
// Make compiler happy by passing a new span created from pointer.
var tempSpan = new Span<byte>(pByteSpan, byteSpan.Length);
ReadRawBytesIntoSpan(ref buffer, ref state, length, tempSpan);
return WritingPrimitives.Utf8Encoding.GetString(pByteSpan, length);
}
}
}
finally
{
if (byteArray != null)
{
ArrayPool<byte>.Shared.Return(byteArray);
}
}
}
#endif
// Slow path: Build a byte array first then copy it.
// This will be called when reading from a Stream because we don't know the length of the stream,
// or there is not enough data in the sequence. If there is not enough data then ReadRawBytes will
// throw an exception.
return WritingPrimitives.Utf8Encoding.GetString(ReadRawBytes(ref buffer, ref state, length), 0, length);
}
/// <summary>
/// Validates that the specified size doesn't exceed the current limit. If it does then remaining bytes
/// are skipped and an error is thrown.
/// </summary>
private static void ValidateCurrentLimit(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int size)
{
if (state.totalBytesRetired + state.bufferPos + size > state.currentLimit)
{
// Read to the end of the stream (up to the current limit) anyway.
SkipRawBytes(ref buffer, ref state, state.currentLimit - state.totalBytesRetired - state.bufferPos);
// Then fail.
throw InvalidProtocolBufferException.TruncatedMessage();
}
}
[SecuritySafeCritical]
private static byte ReadRawByte(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
if (state.bufferPos == state.bufferSize)
{
state.segmentedBufferHelper.RefillBuffer(ref buffer, ref state, true);
}
return buffer[state.bufferPos++];
}
/// <summary>
/// Reads a varint from the input one byte at a time, so that it does not
/// read any bytes after the end of the varint. If you simply wrapped the
/// stream in a CodedInputStream and used ReadRawVarint32(Stream)
/// then you would probably end up reading past the end of the varint since
/// CodedInputStream buffers its input.
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
public static uint ReadRawVarint32(Stream input)
{
int result = 0;
int offset = 0;
for (; offset < 32; offset += 7)
{
int b = input.ReadByte();
if (b == -1)
{
throw InvalidProtocolBufferException.TruncatedMessage();
}
result |= (b & 0x7f) << offset;
if ((b & 0x80) == 0)
{
return (uint) result;
}
}
// Keep reading up to 64 bits.
for (; offset < 64; offset += 7)
{
int b = input.ReadByte();
if (b == -1)
{
throw InvalidProtocolBufferException.TruncatedMessage();
}
if ((b & 0x80) == 0)
{
return (uint) result;
}
}
throw InvalidProtocolBufferException.MalformedVarint();
}
/// <summary>
/// Decode a 32-bit value with ZigZag encoding.
/// </summary>
/// <remarks>
/// ZigZag encodes signed integers into values that can be efficiently
/// encoded with varint. (Otherwise, negative values must be
/// sign-extended to 32 bits to be varint encoded, thus always taking
/// 5 bytes on the wire.)
/// </remarks>
public static int DecodeZigZag32(uint n)
{
return (int)(n >> 1) ^ -(int)(n & 1);
}
/// <summary>
/// Decode a 64-bit value with ZigZag encoding.
/// </summary>
/// <remarks>
/// ZigZag encodes signed integers into values that can be efficiently
/// encoded with varint. (Otherwise, negative values must be
/// sign-extended to 64 bits to be varint encoded, thus always taking
/// 10 bytes on the wire.)
/// </remarks>
public static long DecodeZigZag64(ulong n)
{
return (long)(n >> 1) ^ -(long)(n & 1);
}
/// <summary>
/// Checks whether there is known data available of the specified size remaining to parse.
/// When parsing from a Stream this can return false because we have no knowledge of the amount
/// of data remaining in the stream until it is read.
/// </summary>
public static bool IsDataAvailable(ref ParserInternalState state, int size)
{
// Data fits in remaining buffer
if (size <= state.bufferSize - state.bufferPos)
{
return true;
}
return IsDataAvailableInSource(ref state, size);
}
/// <summary>
/// Checks whether there is known data available of the specified size remaining to parse
/// in the underlying data source.
/// When parsing from a Stream this will return false because we have no knowledge of the amount
/// of data remaining in the stream until it is read.
/// </summary>
private static bool IsDataAvailableInSource(ref ParserInternalState state, int size)
{
// Data fits in remaining source data.
// Note that this will never be true when reading from a stream as the total length is unknown.
return size <= state.segmentedBufferHelper.TotalLength - state.totalBytesRetired - state.bufferPos;
}
/// <summary>
/// Read raw bytes of the specified length into a span. The amount of data available and the current limit should
/// be checked before calling this method.
/// </summary>
private static void ReadRawBytesIntoSpan(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, int length, Span<byte> byteSpan)
{
int remainingByteLength = length;
while (remainingByteLength > 0)
{
if (state.bufferSize - state.bufferPos == 0)
{
state.segmentedBufferHelper.RefillBuffer(ref buffer, ref state, true);
}
ReadOnlySpan<byte> unreadSpan = buffer.Slice(state.bufferPos, Math.Min(remainingByteLength, state.bufferSize - state.bufferPos));
unreadSpan.CopyTo(byteSpan.Slice(length - remainingByteLength));
remainingByteLength -= unreadSpan.Length;
state.bufferPos += unreadSpan.Length;
}
}
}
}
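
Two of the encodings handled above are easy to verify by hand. A varint stores seven payload bits per byte, least-significant group first, with the high bit as a continuation flag; ZigZag maps signed values onto small unsigned ones so negative numbers do not always cost five or ten bytes on the wire. A small standalone check (plain C#, independent of the parser internals):

using System;

static class WireFormatChecks
{
    static void Main()
    {
        // Varint: 300 encodes as 0xAC 0x02.
        //   0xAC = 1010_1100 -> continuation bit set, payload 010_1100 (low 7 bits)
        //   0x02 = 0000_0010 -> payload 000_0010, shifted left by 7
        uint varint = (0xACu & 0x7F) | ((0x02u & 0x7F) << 7);
        Console.WriteLine(varint);                       // 300

        // ZigZag32: n -> (n << 1) ^ (n >> 31), so -1 -> 1, 1 -> 2, -2 -> 3, ...
        // Decoding mirrors DecodeZigZag32 above.
        uint encoded = 3;                                // wire value for -2
        int decoded = (int)(encoded >> 1) ^ -(int)(encoded & 1);
        Console.WriteLine(decoded);                      // -2
    }
}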

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 98f335fa8d02cad408ac030a02f7e29d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,267 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Security;
using Google.Protobuf.Collections;
namespace Google.Protobuf
{
/// <summary>
/// Reading and skipping messages / groups
/// </summary>
[SecuritySafeCritical]
internal static class ParsingPrimitivesMessages
{
private static readonly byte[] ZeroLengthMessageStreamData = new byte[] { 0 };
public static void SkipLastField(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
if (state.lastTag == 0)
{
throw new InvalidOperationException("SkipLastField cannot be called at the end of a stream");
}
switch (WireFormat.GetTagWireType(state.lastTag))
{
case WireFormat.WireType.StartGroup:
SkipGroup(ref buffer, ref state, state.lastTag);
break;
case WireFormat.WireType.EndGroup:
throw new InvalidProtocolBufferException(
"SkipLastField called on an end-group tag, indicating that the corresponding start-group was missing");
case WireFormat.WireType.Fixed32:
ParsingPrimitives.ParseRawLittleEndian32(ref buffer, ref state);
break;
case WireFormat.WireType.Fixed64:
ParsingPrimitives.ParseRawLittleEndian64(ref buffer, ref state);
break;
case WireFormat.WireType.LengthDelimited:
var length = ParsingPrimitives.ParseLength(ref buffer, ref state);
ParsingPrimitives.SkipRawBytes(ref buffer, ref state, length);
break;
case WireFormat.WireType.Varint:
ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
break;
}
}
/// <summary>
/// Skip a group.
/// </summary>
public static void SkipGroup(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state, uint startGroupTag)
{
// Note: Currently we expect this to be the way that groups are read. We could put the recursion
// depth changes into the ReadTag method instead, potentially...
state.recursionDepth++;
if (state.recursionDepth >= state.recursionLimit)
{
throw InvalidProtocolBufferException.RecursionLimitExceeded();
}
uint tag;
while (true)
{
tag = ParsingPrimitives.ParseTag(ref buffer, ref state);
if (tag == 0)
{
throw InvalidProtocolBufferException.TruncatedMessage();
}
// Can't call SkipLastField for this case- that would throw.
if (WireFormat.GetTagWireType(tag) == WireFormat.WireType.EndGroup)
{
break;
}
// This recursion will allow us to handle nested groups.
SkipLastField(ref buffer, ref state);
}
int startField = WireFormat.GetTagFieldNumber(startGroupTag);
int endField = WireFormat.GetTagFieldNumber(tag);
if (startField != endField)
{
throw new InvalidProtocolBufferException(
$"Mismatched end-group tag. Started with field {startField}; ended with field {endField}");
}
state.recursionDepth--;
}
public static void ReadMessage(ref ParseContext ctx, IMessage message)
{
int length = ParsingPrimitives.ParseLength(ref ctx.buffer, ref ctx.state);
if (ctx.state.recursionDepth >= ctx.state.recursionLimit)
{
throw InvalidProtocolBufferException.RecursionLimitExceeded();
}
int oldLimit = SegmentedBufferHelper.PushLimit(ref ctx.state, length);
++ctx.state.recursionDepth;
ReadRawMessage(ref ctx, message);
CheckReadEndOfStreamTag(ref ctx.state);
// Check that we've read exactly as much data as expected.
if (!SegmentedBufferHelper.IsReachedLimit(ref ctx.state))
{
throw InvalidProtocolBufferException.TruncatedMessage();
}
--ctx.state.recursionDepth;
SegmentedBufferHelper.PopLimit(ref ctx.state, oldLimit);
}
public static KeyValuePair<TKey, TValue> ReadMapEntry<TKey, TValue>(ref ParseContext ctx, MapField<TKey, TValue>.Codec codec)
{
int length = ParsingPrimitives.ParseLength(ref ctx.buffer, ref ctx.state);
if (ctx.state.recursionDepth >= ctx.state.recursionLimit)
{
throw InvalidProtocolBufferException.RecursionLimitExceeded();
}
int oldLimit = SegmentedBufferHelper.PushLimit(ref ctx.state, length);
++ctx.state.recursionDepth;
TKey key = codec.KeyCodec.DefaultValue;
TValue value = codec.ValueCodec.DefaultValue;
uint tag;
while ((tag = ctx.ReadTag()) != 0)
{
if (tag == codec.KeyCodec.Tag)
{
key = codec.KeyCodec.Read(ref ctx);
}
else if (tag == codec.ValueCodec.Tag)
{
value = codec.ValueCodec.Read(ref ctx);
}
else
{
SkipLastField(ref ctx.buffer, ref ctx.state);
}
}
// Corner case: a map entry with a key but no value, where the value type is a message.
// Read it as if we'd seen input with no data (i.e. create a "default" message).
if (value == null)
{
if (ctx.state.CodedInputStream != null)
{
// the decoded message might not support parsing from ParseContext, so
// we need to allow fallback to the legacy MergeFrom(CodedInputStream) parsing.
value = codec.ValueCodec.Read(new CodedInputStream(ZeroLengthMessageStreamData));
}
else
{
ParseContext.Initialize(new ReadOnlySequence<byte>(ZeroLengthMessageStreamData), out ParseContext zeroLengthCtx);
value = codec.ValueCodec.Read(ref zeroLengthCtx);
}
}
CheckReadEndOfStreamTag(ref ctx.state);
// Check that we've read exactly as much data as expected.
if (!SegmentedBufferHelper.IsReachedLimit(ref ctx.state))
{
throw InvalidProtocolBufferException.TruncatedMessage();
}
--ctx.state.recursionDepth;
SegmentedBufferHelper.PopLimit(ref ctx.state, oldLimit);
return new KeyValuePair<TKey, TValue>(key, value);
}
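/// <summary>
/// Reads a group field into <paramref name="message"/> and verifies the matching end-group tag.
/// </summary>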
public static void ReadGroup(ref ParseContext ctx, IMessage message)
{
if (ctx.state.recursionDepth >= ctx.state.recursionLimit)
{
throw InvalidProtocolBufferException.RecursionLimitExceeded();
}
++ctx.state.recursionDepth;
uint tag = ctx.state.lastTag;
int fieldNumber = WireFormat.GetTagFieldNumber(tag);
ReadRawMessage(ref ctx, message);
CheckLastTagWas(ref ctx.state, WireFormat.MakeTag(fieldNumber, WireFormat.WireType.EndGroup));
--ctx.state.recursionDepth;
}
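/// <summary>
/// Reads a group field into the given <see cref="UnknownFieldSet"/> and verifies the matching end-group tag.
/// </summary>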
public static void ReadGroup(ref ParseContext ctx, int fieldNumber, UnknownFieldSet set)
{
if (ctx.state.recursionDepth >= ctx.state.recursionLimit)
{
throw InvalidProtocolBufferException.RecursionLimitExceeded();
}
++ctx.state.recursionDepth;
set.MergeGroupFrom(ref ctx);
CheckLastTagWas(ref ctx.state, WireFormat.MakeTag(fieldNumber, WireFormat.WireType.EndGroup));
--ctx.state.recursionDepth;
}
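/// <summary>
/// Merges the remaining fields into <paramref name="message"/>, preferring the
/// ParseContext-based InternalMergeFrom and falling back to legacy
/// MergeFrom(CodedInputStream) parsing for older generated code.
/// </summary>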
public static void ReadRawMessage(ref ParseContext ctx, IMessage message)
{
if (message is IBufferMessage bufferMessage)
{
bufferMessage.InternalMergeFrom(ref ctx);
}
else
{
// If we reached here, it means we've run into a nested message with older generated code
// which doesn't provide the InternalMergeFrom method that takes a ParseContext.
// With a slight performance overhead, we can still parse this message just fine,
// but we need to find the original CodedInputStream instance that initiated this
// parsing process and make sure its internal state is up to date.
// Note that this performance overhead is not very high (basically copying the contents of a struct)
// and it will only be incurred in case the application mixes older and newer generated code.
// Regenerating the code from .proto files will remove this overhead because it will
// generate the InternalMergeFrom method we need.
if (ctx.state.CodedInputStream == null)
{
// This can only happen when the parsing started without providing a CodedInputStream instance
// (e.g. ParseContext was created directly from a ReadOnlySequence).
// That also means that one of the new parsing APIs was used at the top level
// and in that case it is reasonable to require that all nested messages provide
// up-to-date generated code with ParseContext support (and fail otherwise).
throw new InvalidProtocolBufferException($"Message {message.GetType().Name} doesn't provide the generated method that enables ParseContext-based parsing. You might need to regenerate the generated protobuf code.");
}
ctx.CopyStateTo(ctx.state.CodedInputStream);
try
{
// fallback parse using the CodedInputStream that started current parsing tree
message.MergeFrom(ctx.state.CodedInputStream);
}
finally
{
ctx.LoadStateFrom(ctx.state.CodedInputStream);
}
}
}
/// <summary>
/// Verifies that the last call to ReadTag() returned tag 0 - in other words,
/// we've reached the end of the stream when we expected to.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">More data was available in the
/// stream when the end was expected.</exception>
public static void CheckReadEndOfStreamTag(ref ParserInternalState state)
{
if (state.lastTag != 0)
{
throw InvalidProtocolBufferException.MoreDataAvailable();
}
}
private static void CheckLastTagWas(ref ParserInternalState state, uint expectedTag)
{
if (state.lastTag != expectedTag)
{
throw InvalidProtocolBufferException.InvalidEndTag();
}
}
}
}
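
// Illustrative sketch (not part of the committed sources): the end-group check in
// SkipGroup/ReadGroup above relies on standard protobuf tag arithmetic, where a tag
// packs (fieldNumber << 3) | wireType. The WireFormat helpers below are the same ones
// referenced in this file; the field number 7 is an arbitrary example value.
uint startTag = Google.Protobuf.WireFormat.MakeTag(7, Google.Protobuf.WireFormat.WireType.StartGroup); // (7 << 3) | 3 = 59
uint endTag = Google.Protobuf.WireFormat.MakeTag(7, Google.Protobuf.WireFormat.WireType.EndGroup);     // (7 << 3) | 4 = 60
// Start- and end-group tags of the same field differ only in wire type, so comparing
// their decoded field numbers (as SkipGroup does) detects a mismatched end-group tag.
bool sameField = Google.Protobuf.WireFormat.GetTagFieldNumber(startTag) == Google.Protobuf.WireFormat.GetTagFieldNumber(endTag); // true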

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d31c64358a03d3945ba46801f49c5870
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,324 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
using System.Security;
namespace Google.Protobuf
{
/// <summary>
/// Fast parsing primitives for wrapper types
/// </summary>
[SecuritySafeCritical]
internal static class ParsingPrimitivesWrappers
{
internal static float? ReadFloatWrapperLittleEndian(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// length:1 + tag:1 + value:4 = 6 bytes
if (state.bufferPos + 6 <= state.bufferSize)
{
// The entire wrapper message is already contained in `buffer`.
int length = buffer[state.bufferPos];
if (length == 0)
{
state.bufferPos++;
return 0F;
}
// tag:1 + value:4 = length of 5 bytes
// field=1, type=32-bit = tag of 13
if (length != 5 || buffer[state.bufferPos + 1] != 13)
{
return ReadFloatWrapperSlow(ref buffer, ref state);
}
state.bufferPos += 2;
return ParsingPrimitives.ParseFloat(ref buffer, ref state);
}
else
{
return ReadFloatWrapperSlow(ref buffer, ref state);
}
}
internal static float? ReadFloatWrapperSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
if (length == 0)
{
return 0F;
}
int finalBufferPos = state.totalBytesRetired + state.bufferPos + length;
float result = 0F;
do
{
// field=1, type=32-bit = tag of 13
if (ParsingPrimitives.ParseTag(ref buffer, ref state) == 13)
{
result = ParsingPrimitives.ParseFloat(ref buffer, ref state);
}
else
{
ParsingPrimitivesMessages.SkipLastField(ref buffer, ref state);
}
}
while (state.totalBytesRetired + state.bufferPos < finalBufferPos);
return result;
}
internal static double? ReadDoubleWrapperLittleEndian(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// length:1 + tag:1 + value:8 = 10 bytes
if (state.bufferPos + 10 <= state.bufferSize)
{
// The entire wrapper message is already contained in `buffer`.
int length = buffer[state.bufferPos];
if (length == 0)
{
state.bufferPos++;
return 0D;
}
// tag:1 + value:8 = length of 9 bytes
// field=1, type=64-bit = tag of 9
if (length != 9 || buffer[state.bufferPos + 1] != 9)
{
return ReadDoubleWrapperSlow(ref buffer, ref state);
}
state.bufferPos += 2;
return ParsingPrimitives.ParseDouble(ref buffer, ref state);
}
else
{
return ReadDoubleWrapperSlow(ref buffer, ref state);
}
}
internal static double? ReadDoubleWrapperSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
if (length == 0)
{
return 0D;
}
int finalBufferPos = state.totalBytesRetired + state.bufferPos + length;
double result = 0D;
do
{
// field=1, type=64-bit = tag of 9
if (ParsingPrimitives.ParseTag(ref buffer, ref state) == 9)
{
result = ParsingPrimitives.ParseDouble(ref buffer, ref state);
}
else
{
ParsingPrimitivesMessages.SkipLastField(ref buffer, ref state);
}
}
while (state.totalBytesRetired + state.bufferPos < finalBufferPos);
return result;
}
internal static bool? ReadBoolWrapper(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
return ReadUInt64Wrapper(ref buffer, ref state) != 0;
}
internal static uint? ReadUInt32Wrapper(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// field=1, type=varint = tag of 8
const int expectedTag = 8;
// length:1 + tag:1 + value:10(varint64-max) = 12 bytes
// Value can be 64 bits for negative integers
if (state.bufferPos + 12 <= state.bufferSize)
{
// The entire wrapper message is already contained in `buffer`.
int pos0 = state.bufferPos;
int length = buffer[state.bufferPos++];
if (length == 0)
{
return 0;
}
// A valid wrapper message is short, so its length fits in a single varint byte; otherwise fall back.
if (length >= 128)
{
state.bufferPos = pos0;
return ReadUInt32WrapperSlow(ref buffer, ref state);
}
int finalBufferPos = state.bufferPos + length;
if (buffer[state.bufferPos++] != expectedTag)
{
state.bufferPos = pos0;
return ReadUInt32WrapperSlow(ref buffer, ref state);
}
var result = ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
// Verify this message only contained a single field.
if (state.bufferPos != finalBufferPos)
{
state.bufferPos = pos0;
return ReadUInt32WrapperSlow(ref buffer, ref state);
}
return result;
}
else
{
return ReadUInt32WrapperSlow(ref buffer, ref state);
}
}
internal static uint? ReadUInt32WrapperSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
if (length == 0)
{
return 0;
}
int finalBufferPos = state.totalBytesRetired + state.bufferPos + length;
uint result = 0;
do
{
// field=1, type=varint = tag of 8
if (ParsingPrimitives.ParseTag(ref buffer, ref state) == 8)
{
result = ParsingPrimitives.ParseRawVarint32(ref buffer, ref state);
}
else
{
ParsingPrimitivesMessages.SkipLastField(ref buffer, ref state);
}
}
while (state.totalBytesRetired + state.bufferPos < finalBufferPos);
return result;
}
internal static int? ReadInt32Wrapper(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
return (int?)ReadUInt32Wrapper(ref buffer, ref state);
}
internal static ulong? ReadUInt64Wrapper(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// field=1, type=varint = tag of 8
const int expectedTag = 8;
// length:1 + tag:1 + value:10(varint64-max) = 12 bytes
if (state.bufferPos + 12 <= state.bufferSize)
{
// The entire wrapper message is already contained in `buffer`.
int pos0 = state.bufferPos;
int length = buffer[state.bufferPos++];
if (length == 0)
{
return 0L;
}
// A valid wrapper message is short, so its length fits in a single varint byte; otherwise fall back.
if (length >= 128)
{
state.bufferPos = pos0;
return ReadUInt64WrapperSlow(ref buffer, ref state);
}
int finalBufferPos = state.bufferPos + length;
if (buffer[state.bufferPos++] != expectedTag)
{
state.bufferPos = pos0;
return ReadUInt64WrapperSlow(ref buffer, ref state);
}
var result = ParsingPrimitives.ParseRawVarint64(ref buffer, ref state);
// Verify this message only contained a single field.
if (state.bufferPos != finalBufferPos)
{
state.bufferPos = pos0;
return ReadUInt64WrapperSlow(ref buffer, ref state);
}
return result;
}
else
{
return ReadUInt64WrapperSlow(ref buffer, ref state);
}
}
internal static ulong? ReadUInt64WrapperSlow(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
// field=1, type=varint = tag of 8
const int expectedTag = 8;
int length = ParsingPrimitives.ParseLength(ref buffer, ref state);
if (length == 0)
{
return 0L;
}
int finalBufferPos = state.totalBytesRetired + state.bufferPos + length;
ulong result = 0L;
do
{
if (ParsingPrimitives.ParseTag(ref buffer, ref state) == expectedTag)
{
result = ParsingPrimitives.ParseRawVarint64(ref buffer, ref state);
}
else
{
ParsingPrimitivesMessages.SkipLastField(ref buffer, ref state);
}
}
while (state.totalBytesRetired + state.bufferPos < finalBufferPos);
return result;
}
internal static long? ReadInt64Wrapper(ref ReadOnlySpan<byte> buffer, ref ParserInternalState state)
{
return (long?)ReadUInt64Wrapper(ref buffer, ref state);
}
internal static float? ReadFloatWrapperLittleEndian(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadFloatWrapperLittleEndian(ref ctx.buffer, ref ctx.state);
}
internal static float? ReadFloatWrapperSlow(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadFloatWrapperSlow(ref ctx.buffer, ref ctx.state);
}
internal static double? ReadDoubleWrapperLittleEndian(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadDoubleWrapperLittleEndian(ref ctx.buffer, ref ctx.state);
}
internal static double? ReadDoubleWrapperSlow(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadDoubleWrapperSlow(ref ctx.buffer, ref ctx.state);
}
internal static bool? ReadBoolWrapper(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadBoolWrapper(ref ctx.buffer, ref ctx.state);
}
internal static uint? ReadUInt32Wrapper(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadUInt32Wrapper(ref ctx.buffer, ref ctx.state);
}
internal static int? ReadInt32Wrapper(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadInt32Wrapper(ref ctx.buffer, ref ctx.state);
}
internal static ulong? ReadUInt64Wrapper(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadUInt64Wrapper(ref ctx.buffer, ref ctx.state);
}
internal static ulong? ReadUInt64WrapperSlow(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadUInt64WrapperSlow(ref ctx.buffer, ref ctx.state);
}
internal static long? ReadInt64Wrapper(ref ParseContext ctx)
{
return ParsingPrimitivesWrappers.ReadInt64Wrapper(ref ctx.buffer, ref ctx.state);
}
}
}
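
// Illustrative sketch (not part of the committed sources): the single-byte tag values
// that the fast paths above compare against (13, 9 and 8) all describe field 1 of the
// corresponding wrapper message, derived from tag = (fieldNumber << 3) | wireType.
uint floatValueTag = Google.Protobuf.WireFormat.MakeTag(1, Google.Protobuf.WireFormat.WireType.Fixed32);   // (1 << 3) | 5 = 13
uint doubleValueTag = Google.Protobuf.WireFormat.MakeTag(1, Google.Protobuf.WireFormat.WireType.Fixed64);  // (1 << 3) | 1 = 9
uint varintWrapperTag = Google.Protobuf.WireFormat.MakeTag(1, Google.Protobuf.WireFormat.WireType.Varint); // (1 << 3) | 0 = 8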

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0927c986d1cbe2d4a833a6c3d6fdc6c6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e79c1a9373841064aaa1937443f9996c
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,30 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System.Runtime.CompilerServices;
using System.Security;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
#if !NCRUNCH
[assembly: AllowPartiallyTrustedCallers]
#endif
#if SIGNING_DISABLED
[assembly: InternalsVisibleTo("Google.Protobuf.Test")]
#else
[assembly: InternalsVisibleTo("Google.Protobuf.Test, PublicKey=" +
"002400000480000094000000060200000024000052534131000400000100010025800fbcfc63a1" +
"7c66b303aae80b03a6beaa176bb6bef883be436f2a1579edd80ce23edf151a1f4ced97af83abcd" +
"981207041fd5b2da3b498346fcfcd94910d52f25537c4a43ce3fbe17dc7d43e6cbdb4d8f1242dc" +
"b6bd9b5906be74da8daa7d7280f97130f318a16c07baf118839b156299a48522f9fae2371c9665" +
"c5ae9cb6")]
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2c62632d01729a74e8433d32b97736a6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,56 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
using System;
namespace Google.Protobuf
{
/// <summary>
/// Helper methods for throwing exceptions when preconditions are not met.
/// </summary>
/// <remarks>
/// This class is used internally and by generated code; it is not particularly
/// expected to be used from application code, although nothing prevents it
/// from being used that way.
/// </remarks>
public static class ProtoPreconditions
{
/// <summary>
/// Throws an ArgumentNullException if the given value is null, otherwise
/// return the value to the caller.
/// </summary>
public static T CheckNotNull<T>(T value, string name) where T : class
{
if (value == null)
{
throw new ArgumentNullException(name);
}
return value;
}
/// <summary>
/// Throws an ArgumentNullException if the given value is null, otherwise
/// return the value to the caller.
/// </summary>
/// <remarks>
/// This is equivalent to <see cref="CheckNotNull{T}(T, string)"/> but without the type parameter
/// constraint. In most cases, the constraint is useful to prevent you from calling CheckNotNull
/// with a value type - but it gets in the way if either you want to use it with a nullable
/// value type, or you want to use it with an unconstrained type parameter.
/// </remarks>
internal static T CheckNotNullUnconstrained<T>(T value, string name)
{
if (value == null)
{
throw new ArgumentNullException(name);
}
return value;
}
}
}
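
// Usage sketch (not part of the committed sources; the type and member names below are
// hypothetical): generated code typically guards reference-typed field setters with
// ProtoPreconditions.CheckNotNull so that a null assignment fails fast.
public sealed class ExampleHolder
{
    private string name_ = "";

    public string Name
    {
        get { return name_; }
        // Throws ArgumentNullException (parameter name "value") when assigned null.
        set { name_ = Google.Protobuf.ProtoPreconditions.CheckNotNull(value, "value"); }
    }
}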

Some files were not shown because too many files have changed in this diff.