//-----------------------------------------------------------------------
//
// Copyright (c) 2018 Sirenix IVS
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-----------------------------------------------------------------------
namespace VRC.Udon.Serialization.OdinSerializer.Utilities
{
using System;
using System.Threading;
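/// <summary>
/// Non-generic access to a cached value; disposing the cache releases the value back to its pool.
/// </summary>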
public interface ICache : IDisposable
{
object Value { get; }
}
/// <summary>
/// Provides an easy way of claiming and freeing cached values of any non-abstract reference type with a public parameterless constructor.
/// <para />
/// Cached types which implement the <see cref="ICacheNotificationReceiver"/> interface will receive notifications when they are claimed and freed.
/// <para />
/// Only one thread should be holding a given cache instance at a time if <see cref="ICacheNotificationReceiver"/> is implemented, since the invocation of
/// <see cref="ICacheNotificationReceiver.OnFreed"/> is not thread safe; i.e., weird stuff might happen if multiple different threads are trying to free
/// the same cache instance at the same time. This will practically never happen unless you're doing really strange stuff, but the case is documented here.
/// </summary>
/// <typeparam name="T">The type which is cached.</typeparam>
/// <seealso cref="System.IDisposable" />
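/// <example>
/// A minimal usage sketch (illustrative only; <c>PooledBuffer</c> is a hypothetical cached type, not part of this library):
/// <code>
/// using (var cache = Cache&lt;PooledBuffer&gt;.Claim())
/// {
///     PooledBuffer buffer = cache.Value;   // the pooled instance
///     PooledBuffer same = cache;           // or obtained via the implicit conversion to T
///     // ... use buffer ...
/// } // Dispose() releases the instance back into the free list
/// </code>
/// </example>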
public sealed class Cache<T> : ICache where T : class, new()
{
private static readonly bool IsNotificationReceiver = typeof(ICacheNotificationReceiver).IsAssignableFrom(typeof(T));
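// Shared free list of released Cache<T> instances for this closed generic type; null slots are empty.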
private static object[] FreeValues = new object[4];
private bool isFree;
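// 0 = unlocked, 1 = locked; acquired via Interlocked.CompareExchange as a simple spinlock.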
private static volatile int THREAD_LOCK_TOKEN = 0;
private static int maxCacheSize = 5;
/// <summary>
/// Gets or sets the maximum size of the cache. This value can never go beneath 1.
/// </summary>
/// <value>
/// The maximum size of the cache.
/// </value>
public static int MaxCacheSize
{
get
{
return Cache<T>.maxCacheSize;
}
set
{
Cache<T>.maxCacheSize = Math.Max(1, value);
}
}
private Cache()
{
this.Value = new T();
this.isFree = false;
}
/// <summary>
/// The cached value.
/// </summary>
public T Value;
/// <summary>
/// Gets a value indicating whether this cached value is free.
/// </summary>
/// <value>
///   <c>true</c> if this cached value is free; otherwise, <c>false</c>.
/// </value>
public bool IsFree { get { return this.isFree; } }
object ICache.Value { get { return this.Value; } }
/// <summary>
/// Claims a cached value of type <typeparamref name="T"/>.
/// </summary>
/// <returns>A cached value of type <typeparamref name="T"/>.</returns>
public static Cache<T> Claim()
{
Cache<T> result = null;
// Very, very simple spinlock implementation
// this lock will almost never be contested
// and it will never be held for more than
// an instant; therefore, we want to avoid paying
// the lock(object) statement's semaphore
// overhead.
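// Interlocked.CompareExchange only writes 1 into THREAD_LOCK_TOKEN when it currently holds 0,
// and returns the value it found, so a return value of 0 means this thread acquired the lock.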
while (true)
{
if (Interlocked.CompareExchange(ref THREAD_LOCK_TOKEN, 1, 0) == 0)
{
break;
}
}
// We now hold the lock
var freeValues = FreeValues;
var length = freeValues.Length;
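// Scan the free list for a previously released instance to reuse.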
for (int i = 0; i < length; i++)
{
result = (Cache<T>)freeValues[i];
if (!object.ReferenceEquals(result, null))
{
freeValues[i] = null;
result.isFree = false;
break;
}
}
// Release the lock
THREAD_LOCK_TOKEN = 0;
if (result == null)
{
result = new Cache<T>();
}
if (IsNotificationReceiver)
{
(result.Value as ICacheNotificationReceiver).OnClaimed();
}
return result;
}
/// <summary>
/// Releases a cached value.
/// </summary>
/// <param name="cache">The cached value to release.</param>
/// <exception cref="System.ArgumentNullException">The cached value to release is null.</exception>
public static void Release(Cache<T> cache)
{
if (cache == null)
{
throw new ArgumentNullException("cache");
}
if (cache.isFree) return;
// There is no need to invoke OnFreed inside the lock, since it might do heavy work.
// Note that this does leave a thread safety hole: if several different threads
// are trying to free the same cache instance, OnFreed might be called several
// times concurrently for the same cached value.
if (IsNotificationReceiver)
{
(cache.Value as ICacheNotificationReceiver).OnFreed();
}
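// Acquire the same spinlock used in Claim before touching the shared free list.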
while (true)
{
if (Interlocked.CompareExchange(ref THREAD_LOCK_TOKEN, 1, 0) == 0)
{
break;
}
}
// We now hold the lock
if (cache.isFree)
{
// Release the lock and leave - job's done already
THREAD_LOCK_TOKEN = 0;
return;
}
cache.isFree = true;
var freeValues = FreeValues;
var length = freeValues.Length;
bool added = false;
for (int i = 0; i < length; i++)
{
if (object.ReferenceEquals(freeValues[i], null))
{
freeValues[i] = cache;
added = true;
break;
}
}
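// No empty slot was found; if the free list is still smaller than MaxCacheSize,
// double its capacity and store this instance in the first new slot.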
if (!added && length < MaxCacheSize)
{
var newArr = new object[length * 2];
for (int i = 0; i < length; i++)
{
newArr[i] = freeValues[i];
}
newArr[length] = cache;
FreeValues = newArr;
}
// Release the lock
THREAD_LOCK_TOKEN = 0;
}
/// <summary>
/// Performs an implicit conversion from <see cref="Cache{T}"/> to <typeparamref name="T"/>.
/// </summary>
/// <param name="cache">The cache to convert.</param>
/// <returns>
/// The result of the conversion.
/// </returns>
public static implicit operator T(Cache<T> cache)
{
if (cache == null)
{
return default(T);
}
return cache.Value;
}
/// <summary>
/// Releases this cached value.
/// </summary>
public void Release()
{
Release(this);
}
/// <summary>
/// Releases this cached value.
/// </summary>
void IDisposable.Dispose()
{
Cache<T>.Release(this);
}
}
}