chore: sync workspace state
This commit is contained in:
26
MVS/3DGS-Unity/Editor/Utils/CaptureScreenshot.cs
Normal file
26
MVS/3DGS-Unity/Editor/Utils/CaptureScreenshot.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
// SPDX-License-Identifier: MIT

using UnityEditor;
using UnityEngine;

namespace GaussianSplatting.Editor.Utils
{
    /// <summary>
    /// Editor helper: saves a screenshot to the project root via a menu item
    /// (Ctrl/Cmd+G), picking the first free Shot-XXXX.png file name.
    /// </summary>
    public class CaptureScreenshot : MonoBehaviour
    {
        [MenuItem("Tools/Gaussian Splats/Debug/Capture Screenshot %g")]
        public static void CaptureShot()
        {
            // Probe Shot-0000.png, Shot-0001.png, ... until a free name is found.
            string path;
            for (int counter = 0; ; ++counter)
            {
                path = $"Shot-{counter:0000}.png";
                if (!System.IO.File.Exists(path))
                    break;
            }
            ScreenCapture.CaptureScreenshot(path);
            // NOTE(review): ScreenCapture.CaptureScreenshot completes asynchronously,
            // so the file may not exist yet at the time this is logged.
            Debug.Log($"Captured {path}");
        }
    }
}
|
||||
11
MVS/3DGS-Unity/Editor/Utils/CaptureScreenshot.cs.meta
Normal file
11
MVS/3DGS-Unity/Editor/Utils/CaptureScreenshot.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 6c80a2b8daebbc1449b79e5ec436f39d
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
274
MVS/3DGS-Unity/Editor/Utils/FilePickerControl.cs
Normal file
274
MVS/3DGS-Unity/Editor/Utils/FilePickerControl.cs
Normal file
@@ -0,0 +1,274 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using UnityEditor;
|
||||
using UnityEditor.Experimental;
|
||||
using UnityEngine;
|
||||
|
||||
namespace GaussianSplatting.Editor.Utils
|
||||
{
|
||||
public class FilePickerControl
|
||||
{
|
||||
// EditorPrefs key prefix under which last-used and recent paths are stored.
const string kLastPathPref = "nesnausk.utils.FilePickerLastPath";
// NOTE(review): the "folder" icon uses the empty-folder texture and the "file"
// icon uses the folder texture; looks deliberate but worth confirming.
static Texture2D s_FolderIcon => EditorGUIUtility.FindTexture(EditorResources.emptyFolderIconName);
static Texture2D s_FileIcon => EditorGUIUtility.FindTexture(EditorResources.folderIconName);
// IMGUI styles, created lazily in PathFieldGUI (GUIStyle needs an active GUI context).
static GUIStyle s_StyleTextFieldText;
static GUIStyle s_StyleTextFieldDropdown;
// Hash seed used to generate stable IMGUI control IDs for the path field.
static readonly int kPathFieldControlID = "FilePickerPathField".GetHashCode();
// Pixel size of the folder/file icon drawn at the right edge of the text area.
const int kIconSize = 15;
// How many recently used paths are remembered per nameKey.
const int kRecentPathsCount = 20;
|
||||
|
||||
// Turns a stored path into a short label for display in the UI.
// Returns "<none>" for empty input; otherwise normally just the file name,
// except for generic names (point_cloud/splat/input) where up to 4 trailing
// path components are joined with '-' so the label stays distinguishable.
public static string PathToDisplayString(string path)
{
    if (string.IsNullOrWhiteSpace(path))
        return "<none>";

    path = path.Replace('\\', '/');
    string[] parts = path.Split('/');
    string fileName = parts[^1];

    // A non-generic file name is descriptive enough on its own.
    var baseName = Path.GetFileNameWithoutExtension(fileName).ToLowerInvariant();
    bool isGeneric = baseName == "point_cloud" || baseName == "splat" || baseName == "input";
    if (!isGeneric)
        return fileName;

    // Generic file name: fold in some parent folder names for context.
    if (parts.Length >= 4)
        path = string.Join('/', parts.TakeLast(4));
    return path.Replace('/', '-');
}
|
||||
|
||||
// Holds the recent-paths list for one nameKey, plus the GUIContent labels
// derived from it that the dropdown popup displays.
class PreviousPaths
{
    public PreviousPaths(List<string> paths)
    {
        this.paths = paths;
        UpdateContent();
    }
    // Rebuilds the display labels; call after mutating `paths`.
    public void UpdateContent()
    {
        this.content = paths.Select(p => new GUIContent(PathToDisplayString(p))).ToArray();
    }
    public List<string> paths;   // most recently used first
    public GUIContent[] content; // one label per entry in `paths`
}
|
||||
// Recent-paths cache, keyed by the nameKey passed to PathFieldGUI.
Dictionary<string, PreviousPaths> m_PreviousPaths = new();
|
||||
|
||||
// Loads the recent-paths list for nameKey from EditorPrefs, once per key.
void PopulatePreviousPaths(string nameKey)
{
    if (m_PreviousPaths.ContainsKey(nameKey))
        return;

    List<string> restored = new();
    for (int slot = 0; slot < kRecentPathsCount; ++slot)
    {
        var storedPath = EditorPrefs.GetString($"{kLastPathPref}-{nameKey}-{slot}");
        if (string.IsNullOrWhiteSpace(storedPath))
            continue;
        restored.Add(storedPath);
    }
    m_PreviousPaths.Add(nameKey, new PreviousPaths(restored));
}
|
||||
|
||||
// Moves `path` to the front of nameKey's recent list (de-duplicating),
// trims the list to capacity, and persists it to EditorPrefs.
void UpdatePreviousPaths(string nameKey, string path)
{
    if (!m_PreviousPaths.ContainsKey(nameKey))
        m_PreviousPaths.Add(nameKey, new PreviousPaths(new List<string>()));

    var recent = m_PreviousPaths[nameKey];
    var list = recent.paths;

    // Most-recently-used first; remove any older occurrence of the same path.
    list.Remove(path);
    list.Insert(0, path);
    while (list.Count > kRecentPathsCount)
        list.RemoveAt(list.Count - 1);
    recent.UpdateContent();

    for (int slot = 0; slot < list.Count; ++slot)
        EditorPrefs.SetString($"{kLastPathPref}-{nameKey}-{slot}", list[slot]);
}
|
||||
|
||||
// A path is valid when it is non-empty and exists on disk as the expected
// kind: a directory when isFolder, a plain file otherwise.
static bool CheckPath(string path, bool isFolder)
{
    if (string.IsNullOrWhiteSpace(path))
        return false;
    return isFolder ? Directory.Exists(path) : File.Exists(path);
}
|
||||
|
||||
// Converts an absolute path into the stored form: forward slashes, and paths
// inside the Unity project rewritten to be project-relative so stored values
// survive moving the project folder.
static string PathAbsToStorage(string path)
{
    path = path.Replace('\\', '/');
    var dataPath = Application.dataPath;
    if (!path.StartsWith(dataPath, StringComparison.Ordinal))
        return path;
    // Application.dataPath points at <project>/Assets; relativize from <project>.
    var relative = Path.GetRelativePath($"{dataPath}/..", path);
    return relative.Replace('\\', '/');
}
|
||||
|
||||
// Validates a freshly picked path. When valid: stores it as the last-used
// path for nameKey, pushes it onto the recent list, marks the GUI changed
// and consumes the current event. `path` is rewritten to its storage form
// in either case.
bool CheckAndSetNewPath(ref string path, string nameKey, bool isFolder)
{
    path = PathAbsToStorage(path);
    if (!CheckPath(path, isFolder))
        return false;

    EditorPrefs.SetString($"{kLastPathPref}-{nameKey}", path);
    UpdatePreviousPaths(nameKey, path);
    GUI.changed = true;
    Event.current.Use();
    return true;
}
|
||||
|
||||
// Draws the recent-paths dropdown next to the path field and returns the
// (possibly updated) current value.
string PreviousPathsDropdown(Rect position, string value, string nameKey, bool isFolder)
{
    PopulatePreviousPaths(nameKey);

    // No explicit value yet: fall back to the last used path for this key.
    if (string.IsNullOrWhiteSpace(value))
        value = EditorPrefs.GetString($"{kLastPathPref}-{nameKey}");

    m_PreviousPaths.TryGetValue(nameKey, out var prevPaths);

    // Dropdown is disabled (but still drawn) when there is no history.
    // NOTE(review): prevPaths is non-null here because PopulatePreviousPaths
    // always inserts an entry for nameKey; confirm if that invariant changes.
    EditorGUI.BeginDisabledGroup(prevPaths == null || prevPaths.paths.Count == 0);
    EditorGUI.BeginChangeCheck();
    // Draw the popup at zero indent so it lines up with the field rect.
    int oldIndent = EditorGUI.indentLevel;
    EditorGUI.indentLevel = 0;
    // Selected index -1 so picking any entry registers as a change.
    int parameterIndex = EditorGUI.Popup(position, GUIContent.none, -1, prevPaths.content, s_StyleTextFieldDropdown);
    if (EditorGUI.EndChangeCheck() && parameterIndex < prevPaths.paths.Count)
    {
        string newValue = prevPaths.paths[parameterIndex];
        if (CheckAndSetNewPath(ref newValue, nameKey, isFolder))
            value = newValue;
    }
    EditorGUI.indentLevel = oldIndent;
    EditorGUI.EndDisabledGroup();
    return value;
}
|
||||
|
||||
// null extension picks folders
// IMGUI control: a read-only path display with a browse icon, a recent-paths
// dropdown, drag &amp; drop support, click-to-reveal, and Backspace/Delete to
// clear. Returns the (possibly updated) path value; sets GUI.changed when
// the value changes. `nameKey` namespaces the EditorPrefs storage.
public string PathFieldGUI(Rect position, GUIContent label, string value, string extension, string nameKey)
{
    // Styles must be created lazily, inside an active GUI context.
    s_StyleTextFieldText ??= new GUIStyle("TextFieldDropDownText");
    s_StyleTextFieldDropdown ??= new GUIStyle("TextFieldDropdown");
    bool isFolder = extension == null;

    int controlId = GUIUtility.GetControlID(kPathFieldControlID, FocusType.Keyboard, position);
    Rect fullRect = EditorGUI.PrefixLabel(position, controlId, label);
    // Layout: [ text area ... (icon) ][ dropdown ]
    Rect textRect = new Rect(fullRect.x, fullRect.y, fullRect.width - s_StyleTextFieldDropdown.fixedWidth, fullRect.height);
    Rect dropdownRect = new Rect(textRect.xMax, fullRect.y, s_StyleTextFieldDropdown.fixedWidth, fullRect.height);
    Rect iconRect = new Rect(textRect.xMax - kIconSize, textRect.y, kIconSize, textRect.height);

    value = PreviousPathsDropdown(dropdownRect, value, nameKey, isFolder);

    string displayText = PathToDisplayString(value);

    Event evt = Event.current;
    switch (evt.type)
    {
        case EventType.KeyDown:
            // Backspace/Delete clears both the field and the stored last path.
            if (GUIUtility.keyboardControl == controlId)
            {
                if (evt.keyCode is KeyCode.Backspace or KeyCode.Delete)
                {
                    value = null;
                    EditorPrefs.SetString($"{kLastPathPref}-{nameKey}", "");
                    GUI.changed = true;
                    evt.Use();
                }
            }
            break;
        case EventType.Repaint:
            // Highlight the field while it is the active drag target.
            s_StyleTextFieldText.Draw(textRect, new GUIContent(displayText), controlId, DragAndDrop.activeControlID == controlId);
            GUI.DrawTexture(iconRect, isFolder ? s_FolderIcon : s_FileIcon, ScaleMode.ScaleToFit);
            break;
        case EventType.MouseDown:
            if (evt.button != 0 || !GUI.enabled)
                break;

            if (textRect.Contains(evt.mousePosition))
            {
                if (iconRect.Contains(evt.mousePosition))
                {
                    // Icon click: open the OS file/folder picker, starting at
                    // the current value when it still exists.
                    if (string.IsNullOrWhiteSpace(value))
                        value = EditorPrefs.GetString($"{kLastPathPref}-{nameKey}");
                    string newPath;
                    string openToPath = string.Empty;
                    if (isFolder)
                    {
                        if (Directory.Exists(value))
                            openToPath = value;
                        newPath = EditorUtility.OpenFolderPanel("Select folder", openToPath, "");
                    }
                    else
                    {
                        if (File.Exists(value))
                            openToPath = Path.GetDirectoryName(value);
                        newPath = EditorUtility.OpenFilePanel("Select file", openToPath, extension);
                    }
                    if (CheckAndSetNewPath(ref newPath, nameKey, isFolder))
                    {
                        value = newPath;
                        GUI.changed = true;
                        evt.Use();
                    }
                }
                else if (File.Exists(value) || Directory.Exists(value))
                {
                    // Click on the text area reveals the path in Explorer/Finder.
                    EditorUtility.RevealInFinder(value);
                }
                GUIUtility.keyboardControl = controlId;
            }
            break;
        case EventType.DragUpdated:
        case EventType.DragPerform:
            if (textRect.Contains(evt.mousePosition) && GUI.enabled)
            {
                if (DragAndDrop.paths.Length > 0)
                {
                    DragAndDrop.visualMode = DragAndDropVisualMode.Generic;
                    // Only the first dragged path is considered.
                    string path = DragAndDrop.paths[0];
                    path = PathAbsToStorage(path);
                    if (CheckPath(path, isFolder))
                    {
                        if (evt.type == EventType.DragPerform)
                        {
                            UpdatePreviousPaths(nameKey, path);
                            value = path;
                            GUI.changed = true;
                            DragAndDrop.AcceptDrag();
                            DragAndDrop.activeControlID = 0;
                        }
                        else
                            DragAndDrop.activeControlID = controlId;
                    }
                    else
                        DragAndDrop.visualMode = DragAndDropVisualMode.Rejected;
                    evt.Use();
                }
            }
            break;
        case EventType.DragExited:
            if (GUI.enabled)
            {
                HandleUtility.Repaint();
            }
            break;
    }
    return value;
}
|
||||
}
|
||||
}
|
||||
11
MVS/3DGS-Unity/Editor/Utils/FilePickerControl.cs.meta
Normal file
11
MVS/3DGS-Unity/Editor/Utils/FilePickerControl.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 69e6c946494a9b2479ce96542339029c
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
595
MVS/3DGS-Unity/Editor/Utils/KMeansClustering.cs
Normal file
595
MVS/3DGS-Unity/Editor/Utils/KMeansClustering.cs
Normal file
@@ -0,0 +1,595 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
using System;
|
||||
using Unity.Burst;
|
||||
using Unity.Burst.Intrinsics;
|
||||
using Unity.Collections;
|
||||
using Unity.Collections.LowLevel.Unsafe;
|
||||
using Unity.Jobs;
|
||||
using Unity.Mathematics;
|
||||
using Unity.Profiling;
|
||||
using Unity.Profiling.LowLevel;
|
||||
|
||||
namespace GaussianSplatting.Editor.Utils
|
||||
{
|
||||
// Implementation of "Mini Batch" k-means clustering ("Web-Scale K-Means Clustering", Sculley 2010)
|
||||
// using k-means++ for cluster initialization.
|
||||
[BurstCompile]
|
||||
public struct KMeansClustering
|
||||
{
|
||||
// Profiler markers for the individual k-means phases.
// NOTE(review): MarkerFlags.SampleGPU is set even though the work below runs
// in Burst CPU jobs — confirm this flag choice.
static ProfilerMarker s_ProfCalculate = new(ProfilerCategory.Render, "KMeans.Calculate", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfPlusPlus = new(ProfilerCategory.Render, "KMeans.InitialPlusPlus", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfInitialDistanceSum = new(ProfilerCategory.Render, "KMeans.Initialize.DistanceSum", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfInitialPickPoint = new(ProfilerCategory.Render, "KMeans.Initialize.PickPoint", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfInitialDistanceUpdate = new(ProfilerCategory.Render, "KMeans.Initialize.DistanceUpdate", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfAssignClusters = new(ProfilerCategory.Render, "KMeans.AssignClusters", MarkerFlags.SampleGPU);
static ProfilerMarker s_ProfUpdateMeans = new(ProfilerCategory.Render, "KMeans.UpdateMeans", MarkerFlags.SampleGPU);
|
||||
|
||||
// Runs mini-batch k-means over `inputData` (dataSize*dim floats, row-major
// points of `dim` components). The cluster count k is implied by
// outClusterMeans.Length / dim. On success outClusterMeans holds the final
// centroids and outDataLabels the cluster index of every input point.
// `progress` (optional) receives values in [0,1] and may return false to
// cancel; returns false when cancelled. Throws InvalidOperationException on
// inconsistent sizes/parameters.
public static bool Calculate(int dim, NativeArray<float> inputData, int batchSize, float passesOverData, Func<float,bool> progress, NativeArray<float> outClusterMeans, NativeArray<int> outDataLabels)
{
    // Parameter checks
    if (dim < 1)
        throw new InvalidOperationException($"KMeans: dimensionality has to be >= 1, was {dim}");
    if (batchSize < 1)
        throw new InvalidOperationException($"KMeans: batch size has to be >= 1, was {batchSize}");
    if (passesOverData < 0.0001f)
        throw new InvalidOperationException($"KMeans: passes over data must be positive, was {passesOverData}");
    if (inputData.Length % dim != 0)
        throw new InvalidOperationException($"KMeans: input length must be multiple of dim={dim}, was {inputData.Length}");
    if (outClusterMeans.Length % dim != 0)
        throw new InvalidOperationException($"KMeans: output means length must be multiple of dim={dim}, was {outClusterMeans.Length}");
    int dataSize = inputData.Length / dim;
    int k = outClusterMeans.Length / dim;
    if (k < 1)
        throw new InvalidOperationException($"KMeans: cluster count length must be at least 1, was {k}");
    if (dataSize < k)
        throw new InvalidOperationException($"KMeans: input length ({inputData.Length}) must at least as long as clusters ({outClusterMeans.Length})");
    if (dataSize != outDataLabels.Length)
        throw new InvalidOperationException($"KMeans: output labels length must be {dataSize}, was {outDataLabels.Length}");

    using var prof = s_ProfCalculate.Auto();
    batchSize = math.min(dataSize, batchSize);
    uint rngState = 1;

    // Do initial cluster placement (k-means++ over small batches);
    // maps to the 0.0-0.3 range of the progress callback.
    int initBatchSize = 10 * k;
    const int kInitAttempts = 3;
    if (!InitializeCentroids(dim, inputData, initBatchSize, ref rngState, kInitAttempts, outClusterMeans, progress))
        return false;

    // Per-centroid counts; accumulate across all batches to drive the
    // decaying per-centroid learning rate in UpdateCentroidsJob.
    NativeArray<float> counts = new(k, Allocator.TempJob);

    NativeArray<float> batchPoints = new(batchSize * dim, Allocator.TempJob);
    NativeArray<int> batchClusters = new(batchSize, Allocator.TempJob);

    // Main mini-batch loop; progress range 0.3-0.7.
    bool cancelled = false;
    for (float calcDone = 0.0f, calcLimit = dataSize * passesOverData; calcDone < calcLimit; calcDone += batchSize)
    {
        if (progress != null && !progress(0.3f + calcDone / calcLimit * 0.4f))
        {
            cancelled = true;
            break;
        }

        // generate a batch of random input points
        MakeRandomBatch(dim, inputData, ref rngState, batchPoints);

        // find which of the current centroids each batch point is closest to
        {
            using var profPart = s_ProfAssignClusters.Auto();
            AssignClustersJob job = new AssignClustersJob
            {
                dim = dim,
                data = batchPoints,
                means = outClusterMeans,
                indexOffset = 0,
                clusters = batchClusters,
            };
            job.Schedule(batchSize, 1).Complete();
        }

        // update the centroids
        {
            using var profPart = s_ProfUpdateMeans.Auto();
            UpdateCentroidsJob job = new UpdateCentroidsJob
            {
                m_Clusters = outClusterMeans,
                m_Dim = dim,
                m_Counts = counts,
                m_BatchSize = batchSize,
                m_BatchClusters = batchClusters,
                m_BatchPoints = batchPoints
            };
            job.Schedule().Complete();
        }
    }

    // finally find out closest clusters for all input points
    // (chunked so the progress callback can run; progress range 0.7-1.0).
    // NOTE(review): on cancellation here, labels are only partially written.
    {
        using var profPart = s_ProfAssignClusters.Auto();
        const int kAssignBatchCount = 256 * 1024;
        AssignClustersJob job = new AssignClustersJob
        {
            dim = dim,
            data = inputData,
            means = outClusterMeans,
            indexOffset = 0,
            clusters = outDataLabels,
        };
        for (int i = 0; i < dataSize; i += kAssignBatchCount)
        {
            if (progress != null && !progress(0.7f + (float) i / dataSize * 0.3f))
            {
                cancelled = true;
                break;
            }
            job.indexOffset = i;
            job.Schedule(math.min(kAssignBatchCount, dataSize - i), 512).Complete();
        }
    }

    counts.Dispose();
    batchPoints.Dispose();
    batchClusters.Dispose();
    return !cancelled;
}
|
||||
|
||||
// Squared Euclidean distance between element aIndex of `a` and element bIndex
// of `b`, where both arrays hold consecutive dim-sized elements. Uses AVX or
// NEON SIMD when available, with a scalar tail / fallback.
static unsafe float DistanceSquared(int dim, NativeArray<float> a, int aIndex, NativeArray<float> b, int bIndex)
{
    // Convert element indices into float offsets.
    aIndex *= dim;
    bIndex *= dim;
    float d = 0;
    if (X86.Avx.IsAvxSupported)
    {
        // 8x wide with AVX
        int i = 0;
        float* aptr = (float*) a.GetUnsafeReadOnlyPtr() + aIndex;
        float* bptr = (float*) b.GetUnsafeReadOnlyPtr() + bIndex;
        for (; i + 7 < dim; i += 8)
        {
            v256 va = X86.Avx.mm256_loadu_ps(aptr);
            v256 vb = X86.Avx.mm256_loadu_ps(bptr);
            v256 vd = X86.Avx.mm256_sub_ps(va, vb);
            vd = X86.Avx.mm256_mul_ps(vd, vd);

            // hadd leaves pairwise sums within each 128-bit half; summing
            // lanes 0,1,4,5 therefore adds all eight squared differences.
            vd = X86.Avx.mm256_hadd_ps(vd, vd);
            d += vd.Float0 + vd.Float1 + vd.Float4 + vd.Float5;

            aptr += 8;
            bptr += 8;
        }
        // remainder (pointers were already advanced past the SIMD part)
        for (; i < dim; ++i)
        {
            float delta = *aptr - *bptr;
            d += delta * delta;
            aptr++;
            bptr++;
        }
    }
    else if (Arm.Neon.IsNeonSupported)
    {
        // 4x wide with NEON
        int i = 0;
        float* aptr = (float*) a.GetUnsafeReadOnlyPtr() + aIndex;
        float* bptr = (float*) b.GetUnsafeReadOnlyPtr() + bIndex;
        for (; i + 3 < dim; i += 4)
        {
            v128 va = Arm.Neon.vld1q_f32(aptr);
            v128 vb = Arm.Neon.vld1q_f32(bptr);
            v128 vd = Arm.Neon.vsubq_f32(va, vb);
            vd = Arm.Neon.vmulq_f32(vd, vd);

            // vaddvq: horizontal add of all four lanes.
            d += Arm.Neon.vaddvq_f32(vd);

            aptr += 4;
            bptr += 4;
        }
        // remainder
        for (; i < dim; ++i)
        {
            float delta = *aptr - *bptr;
            d += delta * delta;
            aptr++;
            bptr++;
        }

    }
    else
    {
        // Scalar fallback.
        for (var i = 0; i < dim; ++i)
        {
            float delta = a[aIndex + i] - b[bIndex + i];
            d += delta * delta;
        }
    }

    return d;
}
|
||||
|
||||
// Copies one dim-sized element (point or centroid) from src[srcIndex] to
// dst[dstIndex]. The byte count dim * 4 relies on 4-byte floats.
static unsafe void CopyElem(int dim, NativeArray<float> src, int srcIndex, NativeArray<float> dst, int dstIndex)
{
    UnsafeUtility.MemCpy((float*) dst.GetUnsafePtr() + dstIndex * dim,
        (float*) src.GetUnsafeReadOnlyPtr() + srcIndex * dim, dim * 4);
}
|
||||
|
||||
// Initializes minDistSq with each point's squared distance to the very first
// chosen mean (means element 0). The chosen point itself is skipped, leaving
// its slot at the NativeArray default of 0.
[BurstCompile]
struct ClosestDistanceInitialJob : IJobParallelFor
{
    public int dim;
    [ReadOnly] public NativeArray<float> data;
    [ReadOnly] public NativeArray<float> means;
    public NativeArray<float> minDistSq;
    public int pointIndex; // index of the already-taken first mean
    public void Execute(int index)
    {
        if (index == pointIndex)
            return;
        minDistSq[index] = DistanceSquared(dim, data, index, means, 0);
    }
}
|
||||
|
||||
// After a new mean is added, lowers each untaken point's minDistSq when the
// new mean (means element meanIndex) is closer than any previous one.
[BurstCompile]
struct ClosestDistanceUpdateJob : IJobParallelFor
{
    public int dim;
    [ReadOnly] public NativeArray<float> data;
    [ReadOnly] public NativeArray<float> means;
    [ReadOnly] public NativeBitArray taken; // points already picked as means
    public NativeArray<float> minDistSq;
    public int meanIndex; // index of the newly added mean
    public void Execute(int index)
    {
        if (taken.IsSet(index))
            return;
        float distSq = DistanceSquared(dim, data, index, means, meanIndex);
        minDistSq[index] = math.min(minDistSq[index], distSq);
    }
}
|
||||
|
||||
// Sums minDistSq over each kBatchSize-sized slice of the data, skipping
// already-taken points; writes one partial sum per batch index. The caller
// turns these into a prefix sum for weighted sampling.
[BurstCompile]
struct CalcDistSqJob : IJobParallelFor
{
    public const int kBatchSize = 1024;
    public int dataSize;
    [ReadOnly] public NativeBitArray taken;
    [ReadOnly] public NativeArray<float> minDistSq;
    public NativeArray<float> partialSums;

    public void Execute(int batchIndex)
    {
        int iStart = math.min(batchIndex * kBatchSize, dataSize);
        int iEnd = math.min((batchIndex + 1) * kBatchSize, dataSize);
        float sum = 0;
        for (int i = iStart; i < iEnd; ++i)
        {
            if (taken.IsSet(i))
                continue;
            sum += minDistSq[i];
        }

        partialSums[batchIndex] = sum;
    }
}
|
||||
|
||||
// Samples a point index with probability proportional to minDistSq, given
// `partialSums` as an inclusive prefix sum of per-batch distance totals and
// `rval` uniformly drawn from [0, total). Falls back to the last available
// point (then index 0) when rounding leaves nothing selected.
[BurstCompile]
static int PickPointIndex(int dataSize, ref NativeArray<float> partialSums, ref NativeBitArray taken, ref NativeArray<float> minDistSq, float rval)
{
    // Skip batches until we hit the ones that might have value to pick from: binary search for the batch
    int indexL = 0;
    int indexR = partialSums.Length;
    while (indexL < indexR)
    {
        int indexM = (indexL + indexR) / 2;
        if (partialSums[indexM] < rval)
            indexL = indexM + 1;
        else
            indexR = indexM;
    }
    // Start accumulation at the cumulative total of all preceding batches.
    float acc = 0.0f;
    if (indexL > 0)
    {
        acc = partialSums[indexL-1];
    }

    // Now search for the needed point
    int pointIndex = -1;
    for (int i = indexL * CalcDistSqJob.kBatchSize; i < dataSize; ++i)
    {
        if (taken.IsSet(i))
            continue;
        acc += minDistSq[i];
        if (acc >= rval)
        {
            pointIndex = i;
            break;
        }
    }

    // If we have not found a point, pick the last available one
    if (pointIndex < 0)
    {
        for (int i = dataSize - 1; i >= 0; --i)
        {
            if (taken.IsSet(i))
                continue;
            pointIndex = i;
            break;
        }
    }

    // Degenerate case (all points taken): return index 0.
    if (pointIndex < 0)
        pointIndex = 0;

    return pointIndex;
}
|
||||
|
||||
// k-means++ initialization over `data` (dataSize*dim floats): the first mean
// is picked uniformly at random; each subsequent mean is picked with
// probability proportional to the squared distance from the nearest already
// chosen mean. `minDistSq` is caller-provided scratch of dataSize floats;
// `rngState` is advanced.
static void KMeansPlusPlus(int dim, int k, NativeArray<float> data, NativeArray<float> means, NativeArray<float> minDistSq, ref uint rngState)
{
    using var prof = s_ProfPlusPlus.Auto();

    int dataSize = data.Length / dim;

    NativeBitArray taken = new NativeBitArray(dataSize, Allocator.TempJob);

    // Select first mean randomly
    int pointIndex = (int)(pcg_random(ref rngState) % dataSize);
    taken.Set(pointIndex, true);
    CopyElem(dim, data, pointIndex, means, 0);

    // For each point: closest squared distance to the picked point
    {
        ClosestDistanceInitialJob job = new ClosestDistanceInitialJob
        {
            dim = dim,
            data = data,
            means = means,
            minDistSq = minDistSq,
            pointIndex = pointIndex
        };
        job.Schedule(dataSize, 1024).Complete();
    }

    int sumBatches = (dataSize + CalcDistSqJob.kBatchSize - 1) / CalcDistSqJob.kBatchSize;
    NativeArray<float> partialSums = new(sumBatches, Allocator.TempJob);
    int resultCount = 1;
    while (resultCount < k)
    {
        // Find total sum of distances of not yet taken points
        float distSqTotal = 0;
        {
            using var profPart = s_ProfInitialDistanceSum.Auto();
            CalcDistSqJob job = new CalcDistSqJob
            {
                dataSize = dataSize,
                taken = taken,
                minDistSq = minDistSq,
                partialSums = partialSums
            };
            job.Schedule(sumBatches, 1).Complete();
            // Turn per-batch sums into an inclusive prefix sum so that
            // PickPointIndex can binary-search it.
            for (int i = 0; i < sumBatches; ++i)
            {
                distSqTotal += partialSums[i];
                partialSums[i] = distSqTotal;
            }
        }

        // Pick a non-taken point, with a probability proportional
        // to distance: points furthest from any cluster are picked more.
        {
            using var profPart = s_ProfInitialPickPoint.Auto();
            float rval = pcg_hash_float(rngState + (uint)resultCount, distSqTotal);
            pointIndex = PickPointIndex(dataSize, ref partialSums, ref taken, ref minDistSq, rval);
        }

        // Take this point as a new cluster mean
        taken.Set(pointIndex, true);
        CopyElem(dim, data, pointIndex, means, resultCount);
        ++resultCount;

        if (resultCount < k)
        {
            // Update distances of the points: since it tracks closest one,
            // calculate distance to the new cluster and update if smaller.
            using var profPart = s_ProfInitialDistanceUpdate.Auto();
            ClosestDistanceUpdateJob job = new ClosestDistanceUpdateJob
            {
                dim = dim,
                data = data,
                means = means,
                minDistSq = minDistSq,
                taken = taken,
                meanIndex = resultCount - 1
            };
            job.Schedule(dataSize, 256).Complete();
        }
    }

    taken.Dispose();
    partialSums.Dispose();
}
|
||||
|
||||
// For each data point, find cluster index that is closest to it
[BurstCompile]
struct AssignClustersJob : IJobParallelFor
{
    public int indexOffset; // added to the job index so callers can process a sub-range
    public int dim;
    [ReadOnly] public NativeArray<float> data;
    [ReadOnly] public NativeArray<float> means;
    [NativeDisableParallelForRestriction] public NativeArray<int> clusters;
    // Optional output: when created, receives the squared distance to the
    // chosen cluster (used to score init attempts).
    [NativeDisableContainerSafetyRestriction] [NativeDisableParallelForRestriction] public NativeArray<float> distances;

    public void Execute(int index)
    {
        index += indexOffset;
        int meansCount = means.Length / dim;
        // Linear scan over all means; keep the closest.
        float minDist = float.MaxValue;
        int minIndex = 0;
        for (int i = 0; i < meansCount; ++i)
        {
            float dist = DistanceSquared(dim, data, index, means, i);
            if (dist < minDist)
            {
                minIndex = i;
                minDist = dist;
            }
        }
        clusters[index] = minIndex;
        if (distances.IsCreated)
            distances[index] = minDist;
    }
}
|
||||
|
||||
// Fills outBatch with a random sample (without replacement) of dim-sized
// elements from inputData, via a single-threaded Burst job. Advances rngState.
static void MakeRandomBatch(int dim, NativeArray<float> inputData, ref uint rngState, NativeArray<float> outBatch)
{
    var job = new MakeBatchJob
    {
        m_Dim = dim,
        m_InputData = inputData,
        m_Seed = pcg_random(ref rngState),
        m_OutBatch = outBatch
    };
    job.Schedule().Complete();
}
|
||||
|
||||
// Picks batchSize distinct random elements from the input via rejection
// sampling on hashed sequential seeds.
// NOTE(review): when the batch size approaches the data size, rejection
// sampling can iterate many times before finding an unpicked index.
[BurstCompile]
struct MakeBatchJob : IJob
{
    public int m_Dim;
    public NativeArray<float> m_InputData;
    public NativeArray<float> m_OutBatch;
    public uint m_Seed;
    public void Execute()
    {
        uint dataSize = (uint)(m_InputData.Length / m_Dim);
        int batchSize = m_OutBatch.Length / m_Dim;
        NativeHashSet<int> picked = new(batchSize, Allocator.Temp);
        while (picked.Count < batchSize)
        {
            int index = (int)(pcg_hash(m_Seed++) % dataSize);
            if (!picked.Contains(index))
            {
                // picked.Count doubles as the next free output slot.
                CopyElem(m_Dim, m_InputData, index, m_OutBatch, picked.Count);
                picked.Add(index);
            }
        }
        picked.Dispose();
    }
}
|
||||
|
||||
// Mini-batch centroid update (Sculley 2010): each batch point pulls its
// assigned centroid toward itself with a per-centroid learning rate of
// 1/count, where count accumulates across all batches.
[BurstCompile]
struct UpdateCentroidsJob : IJob
{
    public int m_Dim;
    public int m_BatchSize;
    [ReadOnly] public NativeArray<int> m_BatchClusters; // cluster index per batch point
    public NativeArray<float> m_Counts;                 // per-centroid point counts, persists across batches
    [ReadOnly] public NativeArray<float> m_BatchPoints;
    public NativeArray<float> m_Clusters;

    public void Execute()
    {
        for (int i = 0; i < m_BatchSize; ++i)
        {
            int clusterIndex = m_BatchClusters[i];
            m_Counts[clusterIndex]++;
            // Learning rate decays as the centroid sees more points.
            float alpha = 1.0f / m_Counts[clusterIndex];

            for (int j = 0; j < m_Dim; ++j)
            {
                m_Clusters[clusterIndex * m_Dim + j] = math.lerp(m_Clusters[clusterIndex * m_Dim + j],
                    m_BatchPoints[i * m_Dim + j], alpha);
            }
        }
    }
}
|
||||
|
||||
// Runs k-means++ `initAttempts` times on a small random batch, scores each
// attempt by total distance on a separate validation batch, and keeps the
// best centroid set in outClusters. Progress callback covers [0, 0.3);
// returns false when it cancels.
static bool InitializeCentroids(int dim, NativeArray<float> inputData, int initBatchSize, ref uint rngState, int initAttempts, NativeArray<float> outClusters, Func<float,bool> progress)
{
    using var prof = s_ProfPlusPlus.Auto();

    int k = outClusters.Length / dim;
    int dataSize = inputData.Length / dim;
    initBatchSize = math.min(initBatchSize, dataSize);

    NativeArray<float> centroidBatch = new(initBatchSize * dim, Allocator.TempJob);
    NativeArray<float> validationBatch = new(initBatchSize * dim, Allocator.TempJob);
    MakeRandomBatch(dim, inputData, ref rngState, centroidBatch);
    MakeRandomBatch(dim, inputData, ref rngState, validationBatch);

    NativeArray<int> tmpIndices = new(initBatchSize, Allocator.TempJob);
    // tmpDistances is reused: k-means++ scratch, then per-point validation distances.
    NativeArray<float> tmpDistances = new(initBatchSize, Allocator.TempJob);
    NativeArray<float> curCentroids = new(k * dim, Allocator.TempJob);

    float minDistSum = float.MaxValue;

    bool cancelled = false;
    for (int ia = 0; ia < initAttempts; ++ia)
    {
        if (progress != null && !progress((float) ia / initAttempts * 0.3f))
        {
            cancelled = true;
            break;
        }

        KMeansPlusPlus(dim, k, centroidBatch, curCentroids, tmpDistances, ref rngState);

        // Score this attempt: assign validation points, recording distances.
        {
            using var profPart = s_ProfAssignClusters.Auto();
            AssignClustersJob job = new AssignClustersJob
            {
                dim = dim,
                data = validationBatch,
                means = curCentroids,
                indexOffset = 0,
                clusters = tmpIndices,
                distances = tmpDistances
            };
            job.Schedule(initBatchSize, 1).Complete();
        }

        float distSum = 0;
        foreach (var d in tmpDistances)
            distSum += d;

        // is this centroid better?
        if (distSum < minDistSum)
        {
            minDistSum = distSum;
            outClusters.CopyFrom(curCentroids);
        }
    }

    centroidBatch.Dispose();
    validationBatch.Dispose();
    tmpDistances.Dispose();
    tmpIndices.Dispose();
    curCentroids.Dispose();
    return !cancelled;
}
|
||||
|
||||
// One-shot PCG integer hash with good avalanche behavior.
// https://www.reedbeta.com/blog/hash-functions-for-gpu-rendering/
static uint pcg_hash(uint input)
{
    // LCG step to spread the input, then an xorshift-multiply output function.
    uint state = input * 747796405u + 2891336453u;
    uint shifted = state >> (int)((state >> 28) + 4u);
    uint word = (shifted ^ state) * 277803737u;
    return word ^ (word >> 22);
}
|
||||
|
||||
// Maps a hashed uint to a float in [0, upTo): the top 23 hash bits become the
// mantissa of a float in [1, 2), which is shifted down to [0, 1) and scaled.
static float pcg_hash_float(uint input, float upTo)
{
    uint val = pcg_hash(input);
    float unit = math.asfloat(0x3f800000 | (val >> 9)) - 1.0f;
    return unit * upTo;
}
|
||||
|
||||
// PCG random step: hashes the current state for the return value and
// advances the state with an LCG step.
static uint pcg_random(ref uint rng_state)
{
    uint state = rng_state;
    rng_state = state * 747796405u + 2891336453u;
    uint word = ((state >> (int)((state >> 28) + 4u)) ^ state) * 277803737u;
    return word ^ (word >> 22);
}
|
||||
}
|
||||
}
|
||||
11
MVS/3DGS-Unity/Editor/Utils/KMeansClustering.cs.meta
Normal file
11
MVS/3DGS-Unity/Editor/Utils/KMeansClustering.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 9cecadf9c980a4ad9a30d0e1ae09d16a
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
107
MVS/3DGS-Unity/Editor/Utils/PLYFileReader.cs
Normal file
107
MVS/3DGS-Unity/Editor/Utils/PLYFileReader.cs
Normal file
@@ -0,0 +1,107 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using Unity.Collections;
|
||||
|
||||
namespace GaussianSplatting.Editor.Utils
|
||||
{
|
||||
public static class PLYFileReader
|
||||
{
|
||||
public static void ReadFileHeader(string filePath, out int vertexCount, out int vertexStride, out List<string> attrNames)
|
||||
{
|
||||
vertexCount = 0;
|
||||
vertexStride = 0;
|
||||
attrNames = new List<string>();
|
||||
if (!File.Exists(filePath))
|
||||
return;
|
||||
using var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read);
|
||||
ReadHeaderImpl(filePath, out vertexCount, out vertexStride, out attrNames, fs);
|
||||
}
|
||||
|
||||
static void ReadHeaderImpl(string filePath, out int vertexCount, out int vertexStride, out List<string> attrNames, FileStream fs)
|
||||
{
|
||||
// C# arrays and NativeArrays make it hard to have a "byte" array larger than 2GB :/
|
||||
if (fs.Length >= 2 * 1024 * 1024 * 1024L)
|
||||
throw new IOException($"PLY {filePath} read error: currently files larger than 2GB are not supported");
|
||||
|
||||
// read header
|
||||
vertexCount = 0;
|
||||
vertexStride = 0;
|
||||
attrNames = new List<string>();
|
||||
const int kMaxHeaderLines = 9000;
|
||||
for (int lineIdx = 0; lineIdx < kMaxHeaderLines; ++lineIdx)
|
||||
{
|
||||
var line = ReadLine(fs);
|
||||
if (line == "end_header" || line.Length == 0)
|
||||
break;
|
||||
var tokens = line.Split(' ');
|
||||
if (tokens.Length == 3 && tokens[0] == "element" && tokens[1] == "vertex")
|
||||
vertexCount = int.Parse(tokens[2]);
|
||||
if (tokens.Length == 3 && tokens[0] == "property")
|
||||
{
|
||||
ElementType type = tokens[1] switch
|
||||
{
|
||||
"float" => ElementType.Float,
|
||||
"double" => ElementType.Double,
|
||||
"uchar" => ElementType.UChar,
|
||||
_ => ElementType.None
|
||||
};
|
||||
vertexStride += TypeToSize(type);
|
||||
attrNames.Add(tokens[2]);
|
||||
}
|
||||
}
|
||||
//Debug.Log($"PLY {filePath} vtx {vertexCount} stride {vertexStride} attrs #{attrNames.Count} {string.Join(',', attrNames)}");
|
||||
}
|
||||
|
||||
public static void ReadFile(string filePath, out int vertexCount, out int vertexStride, out List<string> attrNames, out NativeArray<byte> vertices)
|
||||
{
|
||||
using var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read);
|
||||
ReadHeaderImpl(filePath, out vertexCount, out vertexStride, out attrNames, fs);
|
||||
|
||||
vertices = new NativeArray<byte>(vertexCount * vertexStride, Allocator.Persistent);
|
||||
var readBytes = fs.Read(vertices);
|
||||
if (readBytes != vertices.Length)
|
||||
throw new IOException($"PLY {filePath} read error, expected {vertices.Length} data bytes got {readBytes}");
|
||||
}
|
||||
|
||||
enum ElementType
|
||||
{
|
||||
None,
|
||||
Float,
|
||||
Double,
|
||||
UChar
|
||||
}
|
||||
|
||||
static int TypeToSize(ElementType t)
|
||||
{
|
||||
return t switch
|
||||
{
|
||||
ElementType.None => 0,
|
||||
ElementType.Float => 4,
|
||||
ElementType.Double => 8,
|
||||
ElementType.UChar => 1,
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(t), t, null)
|
||||
};
|
||||
}
|
||||
|
||||
static string ReadLine(FileStream fs)
|
||||
{
|
||||
var byteBuffer = new List<byte>();
|
||||
while (true)
|
||||
{
|
||||
int b = fs.ReadByte();
|
||||
if (b == -1 || b == '\n')
|
||||
break;
|
||||
byteBuffer.Add((byte)b);
|
||||
}
|
||||
// if line had CRLF line endings, remove the CR part
|
||||
if (byteBuffer.Count > 0 && byteBuffer.Last() == '\r')
|
||||
byteBuffer.RemoveAt(byteBuffer.Count-1);
|
||||
return Encoding.UTF8.GetString(byteBuffer.ToArray());
|
||||
}
|
||||
}
|
||||
}
|
||||
11
MVS/3DGS-Unity/Editor/Utils/PLYFileReader.cs.meta
Normal file
11
MVS/3DGS-Unity/Editor/Utils/PLYFileReader.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 27964c85711004ddca73909489af2e2e
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
403
MVS/3DGS-Unity/Editor/Utils/TinyJsonParser.cs
Normal file
403
MVS/3DGS-Unity/Editor/Utils/TinyJsonParser.cs
Normal file
@@ -0,0 +1,403 @@
|
||||
/*
|
||||
Embedded TinyJSON from https://github.com/pbhogan/TinyJSON (version 71fed96, 2019 May 10) directly here, with
|
||||
custom namespace wrapped around it so it does not clash with any other embedded TinyJSON. Original license:
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2018 Alex Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Text;
|
||||
|
||||
namespace GaussianSplatting.Editor.Utils
|
||||
{
|
||||
// Really simple JSON parser in ~300 lines
|
||||
// - Attempts to parse JSON files with minimal GC allocation
|
||||
// - Nice and simple "[1,2,3]".FromJson<List<int>>() API
|
||||
// - Classes and structs can be parsed too!
|
||||
// class Foo { public int Value; }
|
||||
// "{\"Value\":10}".FromJson<Foo>()
|
||||
// - Can parse JSON without type information into Dictionary<string,object> and List<object> e.g.
|
||||
// "[1,2,3]".FromJson<object>().GetType() == typeof(List<object>)
|
||||
// "{\"Value\":10}".FromJson<object>().GetType() == typeof(Dictionary<string,object>)
|
||||
// - No JIT Emit support to support AOT compilation on iOS
|
||||
// - Attempts are made to NOT throw an exception if the JSON is corrupted or invalid: returns null instead.
|
||||
// - Only public fields and property setters on classes/structs will be written to
|
||||
//
|
||||
// Limitations:
|
||||
// - No JIT Emit support to parse structures quickly
|
||||
// - Limited to parsing <2GB JSON files (due to int.MaxValue)
|
||||
// - Parsing of abstract classes or interfaces is NOT supported and will throw an exception.
|
||||
public static class JSONParser
|
||||
{
|
||||
[ThreadStatic] static Stack<List<string>> splitArrayPool;
|
||||
[ThreadStatic] static StringBuilder stringBuilder;
|
||||
[ThreadStatic] static Dictionary<Type, Dictionary<string, FieldInfo>> fieldInfoCache;
|
||||
[ThreadStatic] static Dictionary<Type, Dictionary<string, PropertyInfo>> propertyInfoCache;
|
||||
|
||||
public static T FromJson<T>(this string json)
|
||||
{
|
||||
// Initialize, if needed, the ThreadStatic variables
|
||||
if (propertyInfoCache == null) propertyInfoCache = new Dictionary<Type, Dictionary<string, PropertyInfo>>();
|
||||
if (fieldInfoCache == null) fieldInfoCache = new Dictionary<Type, Dictionary<string, FieldInfo>>();
|
||||
if (stringBuilder == null) stringBuilder = new StringBuilder();
|
||||
if (splitArrayPool == null) splitArrayPool = new Stack<List<string>>();
|
||||
|
||||
//Remove all whitespace not within strings to make parsing simpler
|
||||
stringBuilder.Length = 0;
|
||||
for (int i = 0; i < json.Length; i++)
|
||||
{
|
||||
char c = json[i];
|
||||
if (c == '"')
|
||||
{
|
||||
i = AppendUntilStringEnd(true, i, json);
|
||||
continue;
|
||||
}
|
||||
if (char.IsWhiteSpace(c))
|
||||
continue;
|
||||
|
||||
stringBuilder.Append(c);
|
||||
}
|
||||
|
||||
//Parse the thing!
|
||||
return (T)ParseValue(typeof(T), stringBuilder.ToString());
|
||||
}
|
||||
|
||||
static int AppendUntilStringEnd(bool appendEscapeCharacter, int startIdx, string json)
|
||||
{
|
||||
stringBuilder.Append(json[startIdx]);
|
||||
for (int i = startIdx + 1; i < json.Length; i++)
|
||||
{
|
||||
if (json[i] == '\\')
|
||||
{
|
||||
if (appendEscapeCharacter)
|
||||
stringBuilder.Append(json[i]);
|
||||
stringBuilder.Append(json[i + 1]);
|
||||
i++;//Skip next character as it is escaped
|
||||
}
|
||||
else if (json[i] == '"')
|
||||
{
|
||||
stringBuilder.Append(json[i]);
|
||||
return i;
|
||||
}
|
||||
else
|
||||
stringBuilder.Append(json[i]);
|
||||
}
|
||||
return json.Length - 1;
|
||||
}
|
||||
|
||||
//Splits { <value>:<value>, <value>:<value> } and [ <value>, <value> ] into a list of <value> strings
|
||||
static List<string> Split(string json)
|
||||
{
|
||||
List<string> splitArray = splitArrayPool.Count > 0 ? splitArrayPool.Pop() : new List<string>();
|
||||
splitArray.Clear();
|
||||
if (json.Length == 2)
|
||||
return splitArray;
|
||||
int parseDepth = 0;
|
||||
stringBuilder.Length = 0;
|
||||
for (int i = 1; i < json.Length - 1; i++)
|
||||
{
|
||||
switch (json[i])
|
||||
{
|
||||
case '[':
|
||||
case '{':
|
||||
parseDepth++;
|
||||
break;
|
||||
case ']':
|
||||
case '}':
|
||||
parseDepth--;
|
||||
break;
|
||||
case '"':
|
||||
i = AppendUntilStringEnd(true, i, json);
|
||||
continue;
|
||||
case ',':
|
||||
case ':':
|
||||
if (parseDepth == 0)
|
||||
{
|
||||
splitArray.Add(stringBuilder.ToString());
|
||||
stringBuilder.Length = 0;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
stringBuilder.Append(json[i]);
|
||||
}
|
||||
|
||||
splitArray.Add(stringBuilder.ToString());
|
||||
|
||||
return splitArray;
|
||||
}
|
||||
|
||||
internal static object ParseValue(Type type, string json)
|
||||
{
|
||||
if (type == typeof(string))
|
||||
{
|
||||
if (json.Length <= 2)
|
||||
return string.Empty;
|
||||
StringBuilder parseStringBuilder = new StringBuilder(json.Length);
|
||||
for (int i = 1; i < json.Length - 1; ++i)
|
||||
{
|
||||
if (json[i] == '\\' && i + 1 < json.Length - 1)
|
||||
{
|
||||
int j = "\"\\nrtbf/".IndexOf(json[i + 1]);
|
||||
if (j >= 0)
|
||||
{
|
||||
parseStringBuilder.Append("\"\\\n\r\t\b\f/"[j]);
|
||||
++i;
|
||||
continue;
|
||||
}
|
||||
if (json[i + 1] == 'u' && i + 5 < json.Length - 1)
|
||||
{
|
||||
UInt32 c = 0;
|
||||
if (UInt32.TryParse(json.Substring(i + 2, 4), System.Globalization.NumberStyles.AllowHexSpecifier, null, out c))
|
||||
{
|
||||
parseStringBuilder.Append((char)c);
|
||||
i += 5;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
parseStringBuilder.Append(json[i]);
|
||||
}
|
||||
return parseStringBuilder.ToString();
|
||||
}
|
||||
if (type.IsPrimitive)
|
||||
{
|
||||
var result = Convert.ChangeType(json, type, System.Globalization.CultureInfo.InvariantCulture);
|
||||
return result;
|
||||
}
|
||||
if (type == typeof(decimal))
|
||||
{
|
||||
decimal result;
|
||||
decimal.TryParse(json, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out result);
|
||||
return result;
|
||||
}
|
||||
if (type == typeof(DateTime))
|
||||
{
|
||||
DateTime result;
|
||||
DateTime.TryParse(json.Replace("\"",""), System.Globalization.CultureInfo.InvariantCulture, System.Globalization.DateTimeStyles.None, out result);
|
||||
return result;
|
||||
}
|
||||
if (json == "null")
|
||||
{
|
||||
return null;
|
||||
}
|
||||
if (type.IsEnum)
|
||||
{
|
||||
if (json[0] == '"')
|
||||
json = json.Substring(1, json.Length - 2);
|
||||
try
|
||||
{
|
||||
return Enum.Parse(type, json, false);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
if (type.IsArray)
|
||||
{
|
||||
Type arrayType = type.GetElementType();
|
||||
if (json[0] != '[' || json[json.Length - 1] != ']')
|
||||
return null;
|
||||
|
||||
List<string> elems = Split(json);
|
||||
Array newArray = Array.CreateInstance(arrayType, elems.Count);
|
||||
for (int i = 0; i < elems.Count; i++)
|
||||
newArray.SetValue(ParseValue(arrayType, elems[i]), i);
|
||||
splitArrayPool.Push(elems);
|
||||
return newArray;
|
||||
}
|
||||
if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>))
|
||||
{
|
||||
Type listType = type.GetGenericArguments()[0];
|
||||
if (json[0] != '[' || json[json.Length - 1] != ']')
|
||||
return null;
|
||||
|
||||
List<string> elems = Split(json);
|
||||
var list = (IList)type.GetConstructor(new Type[] { typeof(int) }).Invoke(new object[] { elems.Count });
|
||||
for (int i = 0; i < elems.Count; i++)
|
||||
list.Add(ParseValue(listType, elems[i]));
|
||||
splitArrayPool.Push(elems);
|
||||
return list;
|
||||
}
|
||||
if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Dictionary<,>))
|
||||
{
|
||||
Type keyType, valueType;
|
||||
{
|
||||
Type[] args = type.GetGenericArguments();
|
||||
keyType = args[0];
|
||||
valueType = args[1];
|
||||
}
|
||||
|
||||
//Refuse to parse dictionary keys that aren't of type string
|
||||
if (keyType != typeof(string))
|
||||
return null;
|
||||
//Must be a valid dictionary element
|
||||
if (json[0] != '{' || json[json.Length - 1] != '}')
|
||||
return null;
|
||||
//The list is split into key/value pairs only, this means the split must be divisible by 2 to be valid JSON
|
||||
List<string> elems = Split(json);
|
||||
if (elems.Count % 2 != 0)
|
||||
return null;
|
||||
|
||||
var dictionary = (IDictionary)type.GetConstructor(new Type[] { typeof(int) }).Invoke(new object[] { elems.Count / 2 });
|
||||
for (int i = 0; i < elems.Count; i += 2)
|
||||
{
|
||||
if (elems[i].Length <= 2)
|
||||
continue;
|
||||
string keyValue = elems[i].Substring(1, elems[i].Length - 2);
|
||||
object val = ParseValue(valueType, elems[i + 1]);
|
||||
dictionary[keyValue] = val;
|
||||
}
|
||||
return dictionary;
|
||||
}
|
||||
if (type == typeof(object))
|
||||
{
|
||||
return ParseAnonymousValue(json);
|
||||
}
|
||||
if (json[0] == '{' && json[json.Length - 1] == '}')
|
||||
{
|
||||
return ParseObject(type, json);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
static object ParseAnonymousValue(string json)
|
||||
{
|
||||
if (json.Length == 0)
|
||||
return null;
|
||||
if (json[0] == '{' && json[json.Length - 1] == '}')
|
||||
{
|
||||
List<string> elems = Split(json);
|
||||
if (elems.Count % 2 != 0)
|
||||
return null;
|
||||
var dict = new Dictionary<string, object>(elems.Count / 2);
|
||||
for (int i = 0; i < elems.Count; i += 2)
|
||||
dict[elems[i].Substring(1, elems[i].Length - 2)] = ParseAnonymousValue(elems[i + 1]);
|
||||
return dict;
|
||||
}
|
||||
if (json[0] == '[' && json[json.Length - 1] == ']')
|
||||
{
|
||||
List<string> items = Split(json);
|
||||
var finalList = new List<object>(items.Count);
|
||||
for (int i = 0; i < items.Count; i++)
|
||||
finalList.Add(ParseAnonymousValue(items[i]));
|
||||
return finalList;
|
||||
}
|
||||
if (json[0] == '"' && json[json.Length - 1] == '"')
|
||||
{
|
||||
string str = json.Substring(1, json.Length - 2);
|
||||
return str.Replace("\\", string.Empty);
|
||||
}
|
||||
if (char.IsDigit(json[0]) || json[0] == '-')
|
||||
{
|
||||
if (json.Contains("."))
|
||||
{
|
||||
double result;
|
||||
double.TryParse(json, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out result);
|
||||
return result;
|
||||
}
|
||||
else
|
||||
{
|
||||
int result;
|
||||
int.TryParse(json, out result);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
if (json == "true")
|
||||
return true;
|
||||
if (json == "false")
|
||||
return false;
|
||||
// handles json == "null" as well as invalid JSON
|
||||
return null;
|
||||
}
|
||||
|
||||
static Dictionary<string, T> CreateMemberNameDictionary<T>(T[] members) where T : MemberInfo
|
||||
{
|
||||
Dictionary<string, T> nameToMember = new Dictionary<string, T>(StringComparer.OrdinalIgnoreCase);
|
||||
for (int i = 0; i < members.Length; i++)
|
||||
{
|
||||
T member = members[i];
|
||||
if (member.IsDefined(typeof(IgnoreDataMemberAttribute), true))
|
||||
continue;
|
||||
|
||||
string name = member.Name;
|
||||
if (member.IsDefined(typeof(DataMemberAttribute), true))
|
||||
{
|
||||
DataMemberAttribute dataMemberAttribute = (DataMemberAttribute)Attribute.GetCustomAttribute(member, typeof(DataMemberAttribute), true);
|
||||
if (!string.IsNullOrEmpty(dataMemberAttribute.Name))
|
||||
name = dataMemberAttribute.Name;
|
||||
}
|
||||
|
||||
nameToMember.Add(name, member);
|
||||
}
|
||||
|
||||
return nameToMember;
|
||||
}
|
||||
|
||||
static object ParseObject(Type type, string json)
|
||||
{
|
||||
object instance = FormatterServices.GetUninitializedObject(type);
|
||||
|
||||
//The list is split into key/value pairs only, this means the split must be divisible by 2 to be valid JSON
|
||||
List<string> elems = Split(json);
|
||||
if (elems.Count % 2 != 0)
|
||||
return instance;
|
||||
|
||||
Dictionary<string, FieldInfo> nameToField;
|
||||
Dictionary<string, PropertyInfo> nameToProperty;
|
||||
if (!fieldInfoCache.TryGetValue(type, out nameToField))
|
||||
{
|
||||
nameToField = CreateMemberNameDictionary(type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.FlattenHierarchy));
|
||||
fieldInfoCache.Add(type, nameToField);
|
||||
}
|
||||
if (!propertyInfoCache.TryGetValue(type, out nameToProperty))
|
||||
{
|
||||
nameToProperty = CreateMemberNameDictionary(type.GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.FlattenHierarchy));
|
||||
propertyInfoCache.Add(type, nameToProperty);
|
||||
}
|
||||
|
||||
for (int i = 0; i < elems.Count; i += 2)
|
||||
{
|
||||
if (elems[i].Length <= 2)
|
||||
continue;
|
||||
string key = elems[i].Substring(1, elems[i].Length - 2);
|
||||
string value = elems[i + 1];
|
||||
|
||||
FieldInfo fieldInfo;
|
||||
PropertyInfo propertyInfo;
|
||||
if (nameToField.TryGetValue(key, out fieldInfo))
|
||||
fieldInfo.SetValue(instance, ParseValue(fieldInfo.FieldType, value));
|
||||
else if (nameToProperty.TryGetValue(key, out propertyInfo))
|
||||
propertyInfo.SetValue(instance, ParseValue(propertyInfo.PropertyType, value), null);
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
}
|
||||
}
|
||||
11
MVS/3DGS-Unity/Editor/Utils/TinyJsonParser.cs.meta
Normal file
11
MVS/3DGS-Unity/Editor/Utils/TinyJsonParser.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e9ea5041388393f459c378c31e4d7b1f
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
Reference in New Issue
Block a user