Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
4e7c9d3
Lazily allocate parameters
NinoFloris Aug 28, 2023
c6c356c
Take original CommandText if possible
NinoFloris Aug 28, 2023
be5f542
Improve Ensure and ReadMessage allocation profile
NinoFloris Aug 28, 2023
9925cdb
Some safe simplifications on TryFastRead and data row reads
NinoFloris Aug 29, 2023
1e614a8
Fix connector not being broken
NinoFloris Aug 29, 2023
fb000cb
Move data row method into process message
NinoFloris Aug 29, 2023
f868fa0
Allow TryRead in even more cases
NinoFloris Aug 29, 2023
363910e
Improve some more code
NinoFloris Aug 29, 2023
1514ebd
Properly handle errors in TryRead
NinoFloris Aug 29, 2023
c0c3154
Improve parse speed for resultset reading
NinoFloris Aug 30, 2023
8bb8ee1
Improve forward seeking in non-sequential mode
NinoFloris Aug 30, 2023
e4920d5
Small speedups
NinoFloris Aug 30, 2023
1b05b7c
Fix resuming non-sequential reads
NinoFloris Aug 30, 2023
8d75faa
Improve NextResult perf
NinoFloris Aug 30, 2023
86368da
Store prepared statement name as bytes
NinoFloris Aug 30, 2023
ef80569
Connection open speedups
NinoFloris Aug 30, 2023
77688ac
Modernized (and slightly faster) command complete message
NinoFloris Aug 31, 2023
149b215
Remove unused read methods
NinoFloris Sep 6, 2023
28ea806
Fixes
NinoFloris Sep 25, 2023
571e0ee
Move some more code around
NinoFloris Sep 6, 2023
9cb01fc
Have separate sync and async methods for Commit
NinoFloris Sep 6, 2023
4afd699
Remove sign extension
NinoFloris Sep 6, 2023
a3d05ce
Remove unused write buffer methods
NinoFloris Sep 6, 2023
b708278
Inline multiplexing WriteCommand
NinoFloris Sep 6, 2023
4446a20
Make metrics and counter collection pay for play
NinoFloris Sep 7, 2023
9a91c98
Replace UtcNow with cpu clock time
NinoFloris Sep 6, 2023
71693e4
Move TypeToConvert to be a field on PgConverterInfo
NinoFloris Oct 31, 2023
058b4e7
Don't look up FieldDescription if we have a matching cached column info
NinoFloris Oct 31, 2023
5232c85
Reset only the absolute minimum amount of fields
NinoFloris Oct 31, 2023
174e892
Re-use property values where possible
NinoFloris Oct 31, 2023
04a3547
Write directly into oid/rows backing fields
NinoFloris Nov 1, 2023
be3127a
Add fast-path to RentArray
NinoFloris Nov 1, 2023
2c6d9f0
Move to timestamp ticks
NinoFloris Nov 1, 2023
e64441d
Rename and add comment
NinoFloris Nov 1, 2023
ecbcb9f
Address feedback
NinoFloris Nov 2, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 9 additions & 5 deletions src/Npgsql.GeoJSON/Internal/GeoJSONConverter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -291,11 +291,15 @@ static Position ReadPosition(PgReader reader, EwkbGeometryType type, bool little
return position;

double ReadDouble(bool littleEndian)
=> littleEndian
// Netstandard is missing ReverseEndianness apis for double.
? Unsafe.As<long, double>(ref Unsafe.AsRef(
BinaryPrimitives.ReverseEndianness(Unsafe.As<double, long>(ref Unsafe.AsRef(reader.ReadDouble())))))
: reader.ReadDouble();
{
if (littleEndian)
{
var value = BinaryPrimitives.ReverseEndianness(Unsafe.As<double, long>(ref Unsafe.AsRef(reader.ReadDouble())));
return Unsafe.As<long, double>(ref value);
}

return reader.ReadDouble();
}
}
}

Expand Down
145 changes: 42 additions & 103 deletions src/Npgsql/BackendMessages/CommandCompleteMessage.cs
Original file line number Diff line number Diff line change
@@ -1,122 +1,61 @@
using System.Diagnostics;
using System;
using System.Buffers.Text;
using Npgsql.Internal;

namespace Npgsql.BackendMessages;

sealed class CommandCompleteMessage : IBackendMessage
{
uint _oid;
ulong _rows;
internal StatementType StatementType { get; private set; }
internal uint OID { get; private set; }
internal ulong Rows { get; private set; }

internal uint OID => _oid;
internal ulong Rows => _rows;

internal CommandCompleteMessage Load(NpgsqlReadBuffer buf, int len)
{
Rows = 0;
OID = 0;

var bytes = buf.Buffer;
var i = buf.ReadPosition;
var bytes = buf.Span.Slice(0, len);
buf.Skip(len);
switch (bytes[i])
{
case (byte)'I':
if (!AreEqual(bytes, i, "INSERT "))
goto default;
StatementType = StatementType.Insert;
i += 7;
OID = (uint) ParseNumber(bytes, ref i);
i++;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'D':
if (!AreEqual(bytes, i, "DELETE "))
goto default;
StatementType = StatementType.Delete;
i += 7;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'U':
if (!AreEqual(bytes, i, "UPDATE "))
goto default;
StatementType = StatementType.Update;
i += 7;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'S':
if (!AreEqual(bytes, i, "SELECT "))
goto default;
StatementType = StatementType.Select;
i += 7;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'M':
if (AreEqual(bytes, i, "MERGE "))
{
StatementType = StatementType.Merge;
i += 6;
}
else if (AreEqual(bytes, i, "MOVE "))
{
StatementType = StatementType.Move;
i += 5;
}
else
goto default;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'F':
if (!AreEqual(bytes, i, "FETCH "))
goto default;
StatementType = StatementType.Fetch;
i += 6;
Rows = ParseNumber(bytes, ref i);
return this;

case (byte)'C':
if (AreEqual(bytes, i, "COPY "))
{
StatementType = StatementType.Copy;
i += 5;
Rows = ParseNumber(bytes, ref i);
return this;
}
if (bytes[i + 4] == 0 && AreEqual(bytes, i, "CALL"))
{
StatementType = StatementType.Call;
return this;
}
// PostgreSQL always writes these strings as ASCII, see https://github.com/postgres/postgres/blob/c8e1ba736b2b9e8c98d37a5b77c4ed31baf94147/src/backend/tcop/cmdtag.c#L130-L133
(StatementType, var argumentsStart) = Convert.ToChar(bytes[0]) switch
{
'S' when bytes.StartsWith("SELECT "u8) => (StatementType.Select, "SELECT ".Length),
'I' when bytes.StartsWith("INSERT "u8) => (StatementType.Insert, "INSERT ".Length),
'U' when bytes.StartsWith("UPDATE "u8) => (StatementType.Update, "UPDATE ".Length),
'D' when bytes.StartsWith("DELETE "u8) => (StatementType.Delete, "DELETE ".Length),
'M' when bytes.StartsWith("MERGE "u8) => (StatementType.Merge, "MERGE ".Length),
'C' when bytes.StartsWith("COPY "u8) => (StatementType.Copy, "COPY ".Length),
'C' when bytes.StartsWith("CALL"u8) => (StatementType.Call, "CALL".Length),
'M' when bytes.StartsWith("MOVE "u8) => (StatementType.Move, "MOVE ".Length),
'F' when bytes.StartsWith("FETCH "u8) => (StatementType.Fetch, "FETCH ".Length),
'C' when bytes.StartsWith("CREATE TABLE AS "u8) => (StatementType.CreateTableAs, "CREATE TABLE AS ".Length),
_ => (StatementType.Other, 0)
};

_oid = 0;
_rows = 0;

// Slice away the null terminator.
var arguments = bytes.Slice(argumentsStart, bytes.Length - argumentsStart - 1);
switch (StatementType)
{
case StatementType.Other:
case StatementType.Call:
break;
case StatementType.Insert:
if (!Utf8Parser.TryParse(arguments, out _oid, out var nextArgumentOffset))
throw new InvalidOperationException("Invalid bytes in command complete message.");
arguments = arguments.Slice(nextArgumentOffset + 1);
goto default;

default:
StatementType = StatementType.Other;
return this;
if (!Utf8Parser.TryParse(arguments, out _rows, out _))
throw new InvalidOperationException("Invalid bytes in command complete message.");
break;
}
}

static bool AreEqual(byte[] bytes, int pos, string s)
{
for (var i = 0; i < s.Length; i++)
{
if (bytes[pos+i] != s[i])
return false;
}
return true;
}

static ulong ParseNumber(byte[] bytes, ref int pos)
{
Debug.Assert(bytes[pos] >= '0' && bytes[pos] <= '9');
ulong result = 0;
do
{
result = result * 10 + bytes[pos++] - '0';
} while (bytes[pos] >= '0' && bytes[pos] <= '9');
return result;
return this;
}

public BackendMessageCode Code => BackendMessageCode.CommandComplete;
Expand Down
70 changes: 40 additions & 30 deletions src/Npgsql/BackendMessages/RowDescriptionMessage.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,20 @@

namespace Npgsql.BackendMessages;

/// <summary>
/// Caches the resolved read binding for a single resultset column: the
/// <see cref="PgConverterInfo"/> chosen for it, the wire <see cref="DataFormat"/>
/// the column is transferred in, and whether values must be read via the
/// object (boxing) path — set when the requested CLR type is <c>object</c>
/// or the converter is a boxing converter.
/// </summary>
readonly struct ColumnInfo
{
    public ColumnInfo(PgConverterInfo converterInfo, DataFormat dataFormat, bool asObject)
    {
        ConverterInfo = converterInfo;
        DataFormat = dataFormat;
        AsObject = asObject;
    }

    /// <summary>The converter info resolved for this column.</summary>
    public PgConverterInfo ConverterInfo { get; }
    /// <summary>Wire format (text or binary) this binding was made for.</summary>
    public DataFormat DataFormat { get; }
    /// <summary>Whether reads must go through the object-typed (boxing) code path.</summary>
    public bool AsObject { get; }
}

/// <summary>
/// A RowDescription message sent from the backend.
/// </summary>
Expand All @@ -24,7 +38,7 @@ sealed class RowDescriptionMessage : IBackendMessage, IReadOnlyList<FieldDescrip
FieldDescription?[] _fields;
readonly Dictionary<string, int> _nameIndex;
Dictionary<string, int>? _insensitiveIndex;
PgConverterInfo[]? _lastConverterInfoCache;
ColumnInfo[]? _lastConverterInfoCache;

internal RowDescriptionMessage(bool connectorOwned, int numFields = 10)
{
Expand Down Expand Up @@ -119,14 +133,14 @@ public FieldDescription this[int index]
}
}

internal void SetConverterInfoCache(ReadOnlySpan<PgConverterInfo> values)
internal void SetConverterInfoCache(ReadOnlySpan<ColumnInfo> values)
{
if (_connectorOwned || _lastConverterInfoCache is not null)
return;
Interlocked.CompareExchange(ref _lastConverterInfoCache, values.ToArray(), null);
}

internal void LoadConverterInfoCache(PgConverterInfo[] values)
internal void LoadConverterInfoCache(ColumnInfo[] values)
{
if (_lastConverterInfoCache is not { } cache)
return;
Expand Down Expand Up @@ -328,17 +342,17 @@ internal void Populate(

internal Type FieldType => ObjectOrDefaultInfo.TypeToConvert;

PgConverterInfo _objectOrDefaultInfo;
ColumnInfo _objectOrDefaultInfo;
internal PgConverterInfo ObjectOrDefaultInfo
{
get
{
if (!_objectOrDefaultInfo.IsDefault)
return _objectOrDefaultInfo;
if (!_objectOrDefaultInfo.ConverterInfo.IsDefault)
return _objectOrDefaultInfo.ConverterInfo;

ref var info = ref _objectOrDefaultInfo;
GetInfo(null, ref _objectOrDefaultInfo, out _);
return info;
GetInfo(null, ref _objectOrDefaultInfo);
return info.ConverterInfo;
}
}

Expand All @@ -350,64 +364,60 @@ internal FieldDescription Clone()
return field;
}

internal void GetInfo(Type? type, ref PgConverterInfo lastConverterInfo, out bool asObject)
internal void GetInfo(Type? type, ref ColumnInfo lastColumnInfo)
{
Debug.Assert(lastConverterInfo.IsDefault || (
ReferenceEquals(_serializerOptions, lastConverterInfo.TypeInfo.Options) &&
lastConverterInfo.TypeInfo.PgTypeId == _serializerOptions.ToCanonicalTypeId(PostgresType)), "Cache is bleeding over");
Debug.Assert(lastColumnInfo.ConverterInfo.IsDefault || (
ReferenceEquals(_serializerOptions, lastColumnInfo.ConverterInfo.TypeInfo.Options) &&
lastColumnInfo.ConverterInfo.TypeInfo.PgTypeId == _serializerOptions.ToCanonicalTypeId(PostgresType)), "Cache is bleeding over");

if (!lastConverterInfo.IsDefault && lastConverterInfo.TypeToConvert == type)
{
asObject = lastConverterInfo.IsBoxingConverter;
if (!lastColumnInfo.ConverterInfo.IsDefault && lastColumnInfo.ConverterInfo.TypeToConvert == type)
return;
}

var odfInfo = DataFormat is DataFormat.Text && type is not null ? ObjectOrDefaultInfo : _objectOrDefaultInfo;
var odfInfo = DataFormat is DataFormat.Text && type is not null ? ObjectOrDefaultInfo : _objectOrDefaultInfo.ConverterInfo;
if (odfInfo is { IsDefault: false })
{
if (typeof(object) == type)
{
lastConverterInfo = odfInfo;
asObject = true;
lastColumnInfo = new(odfInfo, DataFormat, true);
return;
}
if (odfInfo.TypeToConvert == type)
{
lastConverterInfo = odfInfo;
asObject = lastConverterInfo.IsBoxingConverter;
lastColumnInfo = new(odfInfo, DataFormat, odfInfo.IsBoxingConverter);
return;
}
}

GetInfoSlow(out lastConverterInfo, out asObject);
GetInfoSlow(out lastColumnInfo);

[MethodImpl(MethodImplOptions.NoInlining)]
void GetInfoSlow(out PgConverterInfo lastConverterInfo, out bool asObject)
void GetInfoSlow(out ColumnInfo lastColumnInfo)
{
var typeInfo = AdoSerializerHelpers.GetTypeInfoForReading(type ?? typeof(object), PostgresType, _serializerOptions);
PgConverterInfo converterInfo;
switch (DataFormat)
{
case DataFormat.Binary:
// If we don't support binary we'll just throw.
lastConverterInfo = typeInfo.Bind(Field, DataFormat);
asObject = typeof(object) == type || lastConverterInfo.IsBoxingConverter;
converterInfo = typeInfo.Bind(Field, DataFormat);
lastColumnInfo = new(converterInfo, DataFormat.Binary, typeof(object) == type || converterInfo.IsBoxingConverter);
break;
default:
// For text we'll fall back to any available text converter for the expected clr type or throw.
if (!typeInfo.TryBind(Field, DataFormat, out lastConverterInfo))
if (!typeInfo.TryBind(Field, DataFormat, out converterInfo))
{
typeInfo = AdoSerializerHelpers.GetTypeInfoForReading(type ?? typeof(string), _serializerOptions.UnknownPgType, _serializerOptions);
lastConverterInfo = typeInfo.Bind(Field, DataFormat);
asObject = type != lastConverterInfo.TypeToConvert || lastConverterInfo.IsBoxingConverter;
converterInfo = typeInfo.Bind(Field, DataFormat);
lastColumnInfo = new(converterInfo, DataFormat, type != converterInfo.TypeToConvert || converterInfo.IsBoxingConverter);
}
else
asObject = typeof(object) == type || lastConverterInfo.IsBoxingConverter;
lastColumnInfo = new(converterInfo, DataFormat, typeof(object) == type || converterInfo.IsBoxingConverter);
break;
}

// We delay initializing ObjectOrDefaultInfo until after the first lookup (unless it is itself the first lookup).
// When passed in an unsupported type it allows the error to be more specific, instead of just having object/null to deal with.
if (_objectOrDefaultInfo.IsDefault && type is not null)
if (_objectOrDefaultInfo.ConverterInfo.IsDefault && type is not null)
_ = ObjectOrDefaultInfo;
}
}
Expand Down
7 changes: 4 additions & 3 deletions src/Npgsql/Internal/Converters/ArrayConverter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -305,9 +305,10 @@ private protected ArrayConverter(int? expectedDimensions, PgConverterResolution
public override T Read(PgReader reader) => (T)_pgArrayConverter.Read(async: false, reader).Result;

public override ValueTask<T> ReadAsync(PgReader reader, CancellationToken cancellationToken = default)
#pragma warning disable CS9193
=> Unsafe.As<ValueTask<object>, ValueTask<T>>(ref Unsafe.AsRef(_pgArrayConverter.Read(async: true, reader, cancellationToken)));
#pragma warning restore
{
var value = _pgArrayConverter.Read(async: true, reader, cancellationToken);
return Unsafe.As<ValueTask<object>, ValueTask<T>>(ref value);
}

public override Size GetSize(SizeContext context, T values, ref object? writeState)
=> _pgArrayConverter.GetSize(context, values, ref writeState);
Expand Down
14 changes: 8 additions & 6 deletions src/Npgsql/Internal/Converters/AsyncHelpers.cs
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,10 @@ public Continuation(object handle, delegate*<Task, CompletionSource, void> conti
public static unsafe ValueTask<T> ComposingReadAsync<T, TEffective>(this PgConverter<T> instance, PgConverter<TEffective> effectiveConverter, PgReader reader, CancellationToken cancellationToken)
{
if (!typeof(T).IsValueType && !typeof(TEffective).IsValueType)
#pragma warning disable CS9193
return Unsafe.As<ValueTask<TEffective>, ValueTask<T>>(ref Unsafe.AsRef(effectiveConverter.ReadAsync(reader, cancellationToken)));
#pragma warning restore
{
var value = effectiveConverter.ReadAsync(reader, cancellationToken);
return Unsafe.As<ValueTask<TEffective>, ValueTask<T>>(ref value);
}
// Easy if we have all the data.
var task = effectiveConverter.ReadAsync(reader, cancellationToken);
if (task.IsCompletedSuccessfully)
Expand All @@ -90,9 +91,10 @@ static void UnboxAndComplete(Task task, CompletionSource completionSource)
public static unsafe ValueTask<T> ComposingReadAsObjectAsync<T>(this PgConverter<T> instance, PgConverter effectiveConverter, PgReader reader, CancellationToken cancellationToken)
{
if (!typeof(T).IsValueType)
#pragma warning disable CS9193
return Unsafe.As<ValueTask<object>, ValueTask<T>>(ref Unsafe.AsRef(effectiveConverter.ReadAsObjectAsync(reader, cancellationToken)));
#pragma warning restore
{
var value = effectiveConverter.ReadAsObjectAsync(reader, cancellationToken);
return Unsafe.As<ValueTask<object>, ValueTask<T>>(ref value);
}

// Easy if we have all the data.
var task = effectiveConverter.ReadAsObjectAsync(reader, cancellationToken);
Expand Down
Loading