Initialer Upload neues Unity-Projekt

This commit is contained in:
oxidiert
2025-07-09 11:02:37 +02:00
commit da5f268d21
1474 changed files with 76390 additions and 0 deletions

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,22 @@
fileFormatVersion: 2
guid: 0b4a67630b216244f95a642a898628e1
timeCreated: 1438629271
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences:
- startSound: {fileID: 8300000, guid: 3b7a828a646174088aa48ee1db9c439c, type: 3}
- doneSound: {fileID: 8300000, guid: a22b8c429f521429084d616c68dda19a, type: 3}
- failSound: {fileID: 8300000, guid: b4ed406e6e315450f85fa2c0f65bce01, type: 3}
- fadeMaterial: {fileID: 2100000, guid: e642b458f4453e04ab7e2b711c58db98, type: 2}
- convertPanoramaShader: {fileID: 7200000, guid: 3f65e701d3d2eb0419c22d8a8f17431c,
type: 3}
- convertPanoramaStereoShader: {fileID: 7200000, guid: 3e480fbf28fbc684f8fe9ab2cef9dae1,
type: 3}
- textureToBufferShader: {fileID: 7200000, guid: d1cd5a67a32f22748a935ca069e561c0,
type: 3}
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 376a7f47127e3134bab061cb12f146f8
folderAsset: yes
timeCreated: 1438620153
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,275 @@
// Compute shader that converts a cubemap (six camera faces stored in
// cameraPixels) into a slice of an equirectangular (latitude/longitude)
// panorama written into result. Pixels are packed as 0xRRGGBB uints.
#pragma kernel CubeMapToEquirectangular
#pragma kernel CubeMapToEquirectangularPositiveY
#pragma kernel CubeMapToEquirectangularNegativeY
// Output panorama slice, row-major, packed 0xRRGGBB.
RWStructuredBuffer<uint> result;
// Input cubemap texels, face-major then row-major, packed 0xRRGGBB.
StructuredBuffer<uint> cameraPixels;
SamplerState MyLinearClampSampler; // NOTE(review): not referenced anywhere in this shader - possibly legacy
uint equirectangularWidth;  // Full panorama width in pixels
uint equirectangularHeight; // Full panorama height in pixels
uint ssaaFactor;            // Supersampling factor (samples per axis per output pixel)
uint cameraWidth;           // Width of each cubemap face in pixels
uint cameraHeight;          // Height of each cubemap face in pixels
uint startY;                // First panorama row covered by this dispatch's slice
uint sliceHeight;           // Number of panorama rows in this slice
uint cameraPixelsSentinelIdx; // Index of the sentinel value in cameraPixels
uint sentinelIdx;             // Index in result where the sentinel is copied
[numthreads(32,32,1)] // Must match threadsX, threadsY in CapturePanorama.cs
// Converts the six cubemap faces in cameraPixels into one horizontal slice of
// an equirectangular panorama. Each thread produces one output pixel by
// averaging ssaaFactor x ssaaFactor supersampled rays; each ray is projected
// onto the cube face it exits through and sampled bilinearly.
void CubeMapToEquirectangular (uint3 dtid : SV_DispatchThreadID)
{
    if (dtid.x >= equirectangularWidth || dtid.y >= sliceHeight) // In case width/height not multiple of numthreads
        return;
    if (dtid.x == 0u && dtid.y == 0u)
        result[sentinelIdx] = cameraPixels[cameraPixelsSentinelIdx]; // Sentinel value - set correctly only if set correctly in input buffer
    // Must match enum UnityEngine.CubemapFace
    static const uint PositiveX = 0u;
    static const uint NegativeX = 1u;
    static const uint PositiveY = 2u;
    static const uint NegativeY = 3u;
    static const uint PositiveZ = 4u;
    static const uint NegativeZ = 5u;
    static const float pi = 3.14159265f;
    float4 totalColor = float4(0.0f, 0.0f, 0.0f, 0.0f);
    uint2 pos = uint2(dtid.x, dtid.y + startY);
    uint2 loopStart = pos * ssaaFactor;
    uint2 loopEnd = loopStart + uint2(ssaaFactor, ssaaFactor);
    for (uint y = loopStart.y; y < loopEnd.y; y++)
    {
        for (uint x = loopStart.x; x < loopEnd.x; x++)
        {
            // Map the supersample position to latitude/longitude and build the ray direction.
            float xcoord = (float)x / (equirectangularWidth * ssaaFactor);
            float ycoord = (float)y / (equirectangularHeight * ssaaFactor);
            float latitude = (ycoord - 0.5f) * pi;
            float longitude = (xcoord * 2.0f - 1.0f) * pi;
            float cosLat = cos(latitude);
            float equirectRayDirectionX = cosLat * sin (longitude);
            float equirectRayDirectionY = sin (latitude);
            float equirectRayDirectionZ = cosLat * cos (longitude);
            // Project the ray onto each axis in turn; the first face whose
            // projected (u, v) lies inside [-1, 1]^2 is the face the ray exits.
            float distance;
            float u, v;
            uint cameraNum;
            distance = 1.0f / equirectRayDirectionY;
            u = equirectRayDirectionX * distance; v = equirectRayDirectionZ * distance;
            if (u * u <= 1.0f && v * v <= 1.0f) {
                if (equirectRayDirectionY > 0.0f) {
                    cameraNum = PositiveY;
                } else {
                    u = -u;
                    cameraNum = NegativeY;
                }
            }
            else
            {
                distance = 1.0f / equirectRayDirectionX;
                u = -equirectRayDirectionZ * distance; v = equirectRayDirectionY * distance;
                if (u * u <= 1.0f && v * v <= 1.0f) {
                    if (equirectRayDirectionX > 0.0f) {
                        v = -v;
                        cameraNum = PositiveX;
                    } else {
                        cameraNum = NegativeX;
                    }
                }
                else
                {
                    // Fall through to the Z faces unconditionally. In exact
                    // arithmetic one of the three axis tests always succeeds,
                    // but floating-point rounding at cube edges can fail all
                    // of them, which previously left cameraNum uninitialized.
                    // The min() clamps below keep the texel fetch in range
                    // even when (u, v) is fractionally outside [-1, 1].
                    distance = 1.0f / equirectRayDirectionZ;
                    u = equirectRayDirectionX * distance; v = equirectRayDirectionY * distance;
                    if (equirectRayDirectionZ > 0.0f) {
                        v = -v;
                        cameraNum = PositiveZ;
                    } else {
                        cameraNum = NegativeZ;
                    }
                }
            }
            // Map (u, v) from [-1, 1] to texel coordinates on the selected face.
            u = (u + 1.0f) * 0.5f;
            v = (v + 1.0f) * 0.5f;
            // GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
            u *= cameraWidth;
            v *= cameraHeight;
            uint left = min(cameraWidth - 1u, (uint)floor(u)); // Modified to add check
            uint right = min(cameraWidth - 1u, left + 1u);
            uint top = min(cameraHeight - 1u, (uint)floor(v)); // Modified to add check
            uint bottom = min(cameraHeight - 1u, top + 1u);
            float uFrac = frac(u);
            float vFrac = frac(v);
            uint baseIdx = cameraNum * cameraWidth * cameraHeight;
            uint topRow = baseIdx + top * cameraWidth;
            uint bottomRow = baseIdx + bottom * cameraWidth;
            uint topLeft = cameraPixels[topRow + left ];
            uint topRight = cameraPixels[topRow + right];
            uint bottomLeft = cameraPixels[bottomRow + left ];
            uint bottomRight = cameraPixels[bottomRow + right];
            // Bilinear blend of the packed 0xRRGGBB texels, per channel.
            float r = lerp(lerp( topLeft >> 16u , bottomLeft >> 16u , vFrac),
                           lerp( topRight >> 16u , bottomRight >> 16u , vFrac), uFrac);
            float g = lerp(lerp((topLeft >> 8u) & 0xFFu, (bottomLeft >> 8u) & 0xFFu, vFrac),
                           lerp((topRight >> 8u) & 0xFFu, (bottomRight >> 8u) & 0xFFu, vFrac), uFrac);
            float b = lerp(lerp( topLeft & 0xFFu, bottomLeft & 0xFFu, vFrac),
                           lerp( topRight & 0xFFu, bottomRight & 0xFFu, vFrac), uFrac);
            totalColor += float4(r, g, b, 255.0f);
        }
    }
    // Average the supersamples and pack the result back into 0xRRGGBB.
    totalColor /= ssaaFactor * ssaaFactor;
    result[(dtid.y * equirectangularWidth) + dtid.x] =
        ((uint)totalColor.r << 16u) | ((uint)totalColor.g << 8u) | (uint)totalColor.b;
}
[numthreads(32,32,1)] // Must match threadsX, threadsY in CapturePanorama.cs
// Specialized conversion kernel for panorama rows that sample only the +Y
// (top) cubemap face: skips the per-ray face-selection tests of
// CubeMapToEquirectangular and always samples face PositiveY (2).
void CubeMapToEquirectangularPositiveY (uint3 dtid : SV_DispatchThreadID)
{
    if (dtid.x >= equirectangularWidth || dtid.y >= sliceHeight) // In case width/height not multiple of numthreads
        return;
    if (dtid.x == 0u && dtid.y == 0u)
        result[sentinelIdx] = cameraPixels[cameraPixelsSentinelIdx]; // Sentinel value - set correctly only if set correctly in input buffer
    static const uint cameraNum = 2; /* PositiveY */
    static const float pi = 3.14159265f;
    float4 totalColor = float4(0.0f, 0.0f, 0.0f, 0.0f);
    uint2 pos = uint2(dtid.x, dtid.y + startY);
    uint2 loopStart = pos * ssaaFactor;
    uint2 loopEnd = loopStart + uint2(ssaaFactor, ssaaFactor);
    for (uint y = loopStart.y; y < loopEnd.y; y++)
    {
        for (uint x = loopStart.x; x < loopEnd.x; x++)
        {
            // Map the supersample position to latitude/longitude and build the ray direction.
            float xcoord = (float)x / (equirectangularWidth * ssaaFactor);
            float ycoord = (float)y / (equirectangularHeight * ssaaFactor);
            float latitude = (ycoord - 0.5f) * pi;
            float longitude = (xcoord * 2.0f - 1.0f) * pi;
            float cosLat = cos(latitude);
            float equirectRayDirectionX = cosLat * sin (longitude);
            float equirectRayDirectionY = sin (latitude);
            float equirectRayDirectionZ = cosLat * cos (longitude);
            // Project onto the +Y face plane.
            float distance = 1.0f / equirectRayDirectionY;
            float u = equirectRayDirectionX * distance, v = equirectRayDirectionZ * distance;
            u = (u + 1.0f) * 0.5f;
            v = (v + 1.0f) * 0.5f;
            // GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
            u *= cameraWidth;
            v *= cameraHeight;
            // Clamp like CubeMapToEquirectangular does ("Modified to add
            // check"): a ray on the face boundary yields u == cameraWidth
            // (or a fractionally negative u, which wraps to a huge uint when
            // cast), and the previously unclamped floor() indexed the buffer
            // out of bounds.
            uint left = min(cameraWidth - 1u, (uint)floor(u));
            uint right = min(cameraWidth - 1u, left + 1u);
            uint top = min(cameraHeight - 1u, (uint)floor(v));
            uint bottom = min(cameraHeight - 1u, top + 1u);
            float uFrac = frac(u);
            float vFrac = frac(v);
            uint baseIdx = cameraNum * cameraWidth * cameraHeight;
            uint topRow = baseIdx + top * cameraWidth;
            uint bottomRow = baseIdx + bottom * cameraWidth;
            uint topLeft = cameraPixels[topRow + left ];
            uint topRight = cameraPixels[topRow + right];
            uint bottomLeft = cameraPixels[bottomRow + left ];
            uint bottomRight = cameraPixels[bottomRow + right];
            // Bilinear blend of the packed 0xRRGGBB texels, per channel.
            float r = lerp(lerp( topLeft >> 16u , bottomLeft >> 16u , vFrac),
                           lerp( topRight >> 16u , bottomRight >> 16u , vFrac), uFrac);
            float g = lerp(lerp((topLeft >> 8u) & 0xFFu, (bottomLeft >> 8u) & 0xFFu, vFrac),
                           lerp((topRight >> 8u) & 0xFFu, (bottomRight >> 8u) & 0xFFu, vFrac), uFrac);
            float b = lerp(lerp( topLeft & 0xFFu, bottomLeft & 0xFFu, vFrac),
                           lerp( topRight & 0xFFu, bottomRight & 0xFFu, vFrac), uFrac);
            totalColor += float4(r, g, b, 255.0f);
        }
    }
    // Average the supersamples and pack the result back into 0xRRGGBB.
    totalColor /= ssaaFactor * ssaaFactor;
    result[(dtid.y * equirectangularWidth) + dtid.x] =
        ((uint)totalColor.r << 16u) | ((uint)totalColor.g << 8u) | (uint)totalColor.b;
}
[numthreads(32,32,1)] // Must match threadsX, threadsY in CapturePanorama.cs
// Specialized conversion kernel for panorama rows that sample only the -Y
// (bottom) cubemap face: skips the per-ray face-selection tests of
// CubeMapToEquirectangular and always samples face NegativeY (3).
void CubeMapToEquirectangularNegativeY (uint3 dtid : SV_DispatchThreadID)
{
    if (dtid.x >= equirectangularWidth || dtid.y >= sliceHeight) // In case width/height not multiple of numthreads
        return;
    if (dtid.x == 0u && dtid.y == 0u)
        result[sentinelIdx] = cameraPixels[cameraPixelsSentinelIdx]; // Sentinel value - set correctly only if set correctly in input buffer
    static const uint cameraNum = 3; /* NegativeY */
    static const float pi = 3.14159265f;
    float4 totalColor = float4(0.0f, 0.0f, 0.0f, 0.0f);
    uint2 pos = uint2(dtid.x, dtid.y + startY);
    uint2 loopStart = pos * ssaaFactor;
    uint2 loopEnd = loopStart + uint2(ssaaFactor, ssaaFactor);
    for (uint y = loopStart.y; y < loopEnd.y; y++)
    {
        for (uint x = loopStart.x; x < loopEnd.x; x++)
        {
            // Map the supersample position to latitude/longitude and build the ray direction.
            float xcoord = (float)x / (equirectangularWidth * ssaaFactor);
            float ycoord = (float)y / (equirectangularHeight * ssaaFactor);
            float latitude = (ycoord - 0.5f) * pi;
            float longitude = (xcoord * 2.0f - 1.0f) * pi;
            float cosLat = cos(latitude);
            float equirectRayDirectionX = cosLat * sin (longitude);
            float equirectRayDirectionY = sin (latitude);
            float equirectRayDirectionZ = cosLat * cos (longitude);
            // Project onto the -Y face plane; mirror u as in the main kernel's NegativeY branch.
            float distance = 1.0f / equirectRayDirectionY;
            float u = equirectRayDirectionX * distance, v = equirectRayDirectionZ * distance;
            u = -u;
            u = (u + 1.0f) * 0.5f;
            v = (v + 1.0f) * 0.5f;
            // GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
            u *= cameraWidth;
            v *= cameraHeight;
            // Clamp like CubeMapToEquirectangular does ("Modified to add
            // check"): a ray on the face boundary yields u == cameraWidth
            // (or a fractionally negative u, which wraps to a huge uint when
            // cast), and the previously unclamped floor() indexed the buffer
            // out of bounds.
            uint left = min(cameraWidth - 1u, (uint)floor(u));
            uint right = min(cameraWidth - 1u, left + 1u);
            uint top = min(cameraHeight - 1u, (uint)floor(v));
            uint bottom = min(cameraHeight - 1u, top + 1u);
            float uFrac = frac(u);
            float vFrac = frac(v);
            uint baseIdx = cameraNum * cameraWidth * cameraHeight;
            uint topRow = baseIdx + top * cameraWidth;
            uint bottomRow = baseIdx + bottom * cameraWidth;
            uint topLeft = cameraPixels[topRow + left ];
            uint topRight = cameraPixels[topRow + right];
            uint bottomLeft = cameraPixels[bottomRow + left ];
            uint bottomRight = cameraPixels[bottomRow + right];
            // Bilinear blend of the packed 0xRRGGBB texels, per channel.
            float r = lerp(lerp( topLeft >> 16u , bottomLeft >> 16u , vFrac),
                           lerp( topRight >> 16u , bottomRight >> 16u , vFrac), uFrac);
            float g = lerp(lerp((topLeft >> 8u) & 0xFFu, (bottomLeft >> 8u) & 0xFFu, vFrac),
                           lerp((topRight >> 8u) & 0xFFu, (bottomRight >> 8u) & 0xFFu, vFrac), uFrac);
            float b = lerp(lerp( topLeft & 0xFFu, bottomLeft & 0xFFu, vFrac),
                           lerp( topRight & 0xFFu, bottomRight & 0xFFu, vFrac), uFrac);
            totalColor += float4(r, g, b, 255.0f);
        }
    }
    // Average the supersamples and pack the result back into 0xRRGGBB.
    totalColor /= ssaaFactor * ssaaFactor;
    result[(dtid.y * equirectangularWidth) + dtid.x] =
        ((uint)totalColor.r << 16u) | ((uint)totalColor.g << 8u) | (uint)totalColor.b;
}

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 3f65e701d3d2eb0419c22d8a8f17431c
timeCreated: 1437349860
licenseType: Store
ComputeShaderImporter:
currentBuildTarget: 5
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,256 @@
// Compute shader that assembles a stereoscopic (two-eye) equirectangular
// panorama from a ring of camera captures plus top/bottom cap captures.
#pragma kernel RenderStereo
// Output: both eyes' panoramas (left then right, selected by dtid.z),
// packed 0xRRGGBB; index width*height*2 holds the sentinel.
RWStructuredBuffer<uint> result;
RWStructuredBuffer<uint> forceWaitResultBuffer; // Written by the last thread so the CPU can block until the dispatch completes
StructuredBuffer<uint> cameraPixels; // Input camera captures, camera-major then row-major, packed 0xRRGGBB
uint equirectangularWidth;  // Output panorama width in pixels
uint equirectangularHeight; // Output panorama height in pixels (per eye)
uint ssaaFactor;            // Supersampling factor (samples per axis per output pixel)
uint cameraWidth;           // Width of each camera capture in pixels
uint cameraHeight;          // Height of each camera capture in pixels
// Camera frustum tangents/adjustments, eye separation, and capture-circle radius.
float tanHalfHFov, tanHalfVFov, hFovAdjust, vFovAdjust, interpupillaryDistance, circleRadius;
uint numCirclePoints; // Number of capture points on the circle
// Range of circle points covered by this dispatch, and the circular buffer
// window of cameraPixels that currently holds their captures.
uint circlePointStart, circlePointEnd, circlePointCircularBufferStart, circlePointCircularBufferSize;
uint leftRightPass;    // Nonzero: select cameras/adjust rays horizontally; zero: vertically
uint forceWaitValue;   // Value written to forceWaitResultBuffer[0] on completion
uint cameraPixelsSentinelIdx; // Index of the sentinel value in cameraPixels
// Renders one pixel of a stereoscopic equirectangular panorama per thread.
// dtid.z selects the eye (0 = left, 1 = right, per the CPU-equivalent
// comments below). For each supersampled output direction the shader finds
// where the eye's gaze ray crosses the circle of capture points, samples the
// two adjacent capture cameras, and blends them. Near the poles the
// effective interpupillary distance fades to zero and a top/bottom cap
// capture is blended in to avoid discontinuities.
[numthreads(32,32,1)] // Must match threadsX, threadsY in CapturePanorama.cs
void RenderStereo (uint3 dtid : SV_DispatchThreadID)
{
if (dtid.x >= equirectangularWidth || dtid.y >= equirectangularHeight) // In case width/height not multiple of numthreads
return;
// The last thread of the right-eye pass writes the completion markers.
if (dtid.x == equirectangularWidth - 1 && dtid.y == equirectangularHeight - 1 && dtid.z == 1)
{
forceWaitResultBuffer[0] = forceWaitValue; // Used on CPU side to force a wait for this operation to complete
result[equirectangularWidth * equirectangularHeight * 2] =
cameraPixels[cameraPixelsSentinelIdx]; // Sentinel value - set correctly only if set correctly in input buffer
}
static const float pi = 3.14159265f;
uint2 pos = dtid.xy;
uint2 loopStart = pos * ssaaFactor;
uint2 loopEnd = loopStart + uint2(ssaaFactor, ssaaFactor);
uint i = dtid.z; // Eye index: 0 = left, 1 = right
float4 totalColor = float4(0.0f, 0.0f, 0.0f, 0.0f);
// Supersampling loop: accumulate ssaaFactor^2 samples for this output pixel.
for (uint y = loopStart.y; y < loopEnd.y; y++)
{
for (uint x = loopStart.x; x < loopEnd.x; x++)
{
// Map the supersample position to latitude/longitude.
float xcoord = (float)x / (equirectangularWidth * ssaaFactor);
float ycoord = (float)y / (equirectangularHeight * ssaaFactor);
float latitude = (ycoord - 0.5f) * pi;
float sinLat, cosLat;
sincos(latitude, sinLat, cosLat);
float longitude = (xcoord * 2.0f - 1.0f) * pi;
float sinLong, cosLong;
sincos(longitude, sinLong, cosLong);
// Scale IPD down as latitude moves toward poles to avoid discontinuities
float latitudeNormalized = latitude / (pi / 2.0f); // Map to [-1, 1]
// float ipdScale = 1.0f;
// float ipdScale = 1.0f - latitudeNormalized * latitudeNormalized;
float ipdScale = 1.5819767068693265f * exp(-latitudeNormalized * latitudeNormalized) - 0.5819767068693265f;
// float ipdScale = 1.1565176427496657f * exp(-2.0f * latitudeNormalized * latitudeNormalized) - 0.15651764274966568f;
// float ipdScale = 1.0000454019910097f * exp(-10.0f * latitudeNormalized * latitudeNormalized) - 0.00004540199100968779f;
float scaledEyeRadius = ipdScale * interpupillaryDistance / 2.0f;
// The following is equivalent to:
// Quaternion eyesRotation = Quaternion.Euler(0.0f, longitude * 360.0f / (2 * pi), 0.0f);
// float3 initialEyePosition = (i == 0 ? float3.left : float3.right) * scaledEyeRadius;
// float3 pos = eyesRotation * initialEyePosition; // eye position
// float3 dir = eyesRotation * float3.forward; // gaze direction
float3 dir = float3(sinLong, 0.0f, cosLong);
// Find place on circle where gaze ray crosses circle.
// Simplest way to do this is solve it geometrically assuming longitude=0, then rotate.
float angle = (pi/2.0f - acos(scaledEyeRadius/circleRadius));
if (i == 0) angle = -angle;
float circlePointAngle = longitude + angle;
if (circlePointAngle < 0.0f) circlePointAngle += 2 * pi;
if (circlePointAngle >= 2 * pi) circlePointAngle -= 2 * pi;
// Fractional index of the crossing on the circle; the two bracketing
// capture points are sampled and blended by frac(circlePointNumber).
float circlePointNumber = circlePointAngle / (2 * pi) * numCirclePoints;
uint circlePoint0 = (uint)floor(circlePointNumber);
if (circlePoint0 < circlePointStart)
circlePoint0 += numCirclePoints; // Deal with an edge case when doing final slice with SSAA > 1
// NOTE(review): this return abandons the entire output pixel, including any
// supersamples already accumulated; presumably another dispatch covering the
// containing circle-point range produces it instead - confirm on the CPU side.
if (circlePoint0 < circlePointStart || circlePoint0 + 1 >= circlePointEnd)
return;
uint cameraNum;
float u, v;
float ipdScaleLerp = 1.0f - ipdScale * 5.0f; // Scale [0, 0.2] to [0, 1] and reverse
// Top/bottom cap
float4 colorCap = float4(0, 0, 0, 0);
if (ipdScaleLerp > 0.0f)
{
// Near a pole: sample the monoscopic top (camera 0) or bottom (camera 1)
// cap capture for later blending with the stereo ring sample.
float equirectRayDirectionX = cosLat * sinLong;
float equirectRayDirectionY = sinLat;
float equirectRayDirectionZ = cosLat * cosLong;
float distance = 1.0f / equirectRayDirectionY;
u = equirectRayDirectionX * distance; v = equirectRayDirectionZ * distance;
if (u * u <= 1 && v * v <= 1)
{
if (equirectRayDirectionY > 0.0f)
{
cameraNum = 0;
}
else
{
u = -u;
cameraNum = 1;
}
u = (u + 1.0f) * 0.5f;
v = (v + 1.0f) * 0.5f;
// GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
u *= cameraWidth;
v *= cameraHeight;
// NOTE(review): unlike CubeMapToEquirectangular, left/top are not clamped
// here; a ray exactly on the face boundary would index one texel past the
// row - confirm whether the u*u <= 1 guard above excludes that case.
uint left = (uint)floor(u);
uint right = min(cameraWidth - 1, left + 1);
uint top = (uint)floor(v);
uint bottom = min(cameraHeight - 1, top + 1);
float uFrac = frac(u);
float vFrac = frac(v);
uint baseIdx = cameraNum * cameraWidth * cameraHeight;
uint topRow = baseIdx + top * cameraWidth;
uint bottomRow = baseIdx + bottom * cameraWidth;
uint topLeft = cameraPixels[topRow + left ];
uint topRight = cameraPixels[topRow + right];
uint bottomLeft = cameraPixels[bottomRow + left ];
uint bottomRight = cameraPixels[bottomRow + right];
// Bilinear blend of the packed 0xRRGGBB texels, per channel.
float r = lerp(lerp( topLeft >> 16 , bottomLeft >> 16 , vFrac),
lerp( topRight >> 16 , bottomRight >> 16 , vFrac), uFrac);
float g = lerp(lerp((topLeft >> 8) & 0xFF, (bottomLeft >> 8) & 0xFF, vFrac),
lerp((topRight >> 8) & 0xFF, (bottomRight >> 8) & 0xFF, vFrac), uFrac);
float b = lerp(lerp( topLeft & 0xFF, bottomLeft & 0xFF, vFrac),
lerp( topRight & 0xFF, bottomRight & 0xFF, vFrac), uFrac);
float4 col = float4(r, g, b, 255.0f);
colorCap = col;
}
}
// Sample the two circle capture points bracketing the gaze crossing.
float4 color0 = float4(0, 0, 0, 0), color1 = float4(0, 0, 0, 0);
for (uint j=0; j < 2; j++)
{
uint circlePointIdx = (circlePoint0 + j) % numCirclePoints;
float cameraPointAngle = 2 * pi * circlePointIdx / numCirclePoints;
float sinCameraPointAngle, cosCameraPointAngle;
sincos(cameraPointAngle, sinCameraPointAngle, cosCameraPointAngle);
// Equivalent to (using fact that both dir and circlePointNorm are unit vectors):
// Quaternion circlePointRotation = Quaternion.Euler(0.0f, cameraPointAngle * 360.0f / (2 * pi), 0.0f);
// float3 circlePointNormal = circlePointRotation * float3.forward;
// float newLongitudeDegrees = sign(cross(circlePointNormal, dir).y) * angle(circlePointNormal, dir);
// Clamp here avoids numerical out-of-bounds trouble when circlePointAngle = longitude
float newLongitude = sign(dir.x * cosCameraPointAngle - dir.z * sinCameraPointAngle) *
acos(clamp(dir.z * cosCameraPointAngle + dir.x * sinCameraPointAngle, -1.0f, 1.0f));
float sinNewLong, cosNewLong;
sincos(newLongitude, sinNewLong, cosNewLong);
// Select which of the two cameras for this point to use and adjust ray to make camera plane perpendicular to axes
// 2 + because first two are top/bottom
uint cameraNumBase = 2 + ((circlePoint0 + j + circlePointCircularBufferStart - circlePointStart) % circlePointCircularBufferSize) * 2;
float3 textureRayDirAdjusted;
if (leftRightPass)
{
cameraNum = cameraNumBase + (newLongitude >= 0.0f ? 1 : 0);
float longitudeAdjust = (newLongitude >= 0.0f ? -hFovAdjust : hFovAdjust);
float longSum = newLongitude + longitudeAdjust;
float sinLongSum, cosLongSum;
sincos(longSum, sinLongSum, cosLongSum);
// Equivalent to:
// float3 textureRayDir = Quaternion.Euler(-latitude * 360.0f / (2 * pi), newLongitude * 360.0f / (2 * pi), 0.0f) * float3.forward;
// float3 textureRayDirAdjusted = Quaternion.Euler(0.0f, longitudeAdjust * 360.0f / (2 * pi), 0.0f) * textureRayDir;
textureRayDirAdjusted = float3(cosLat * sinLongSum, sinLat, cosLat * cosLongSum);
// float3 textureRayDirAdjusted = float3(
// sin(latitude + newLongitude + longitudeAdjust) - sinLat * cosLongSum,
// sinLat,
// cos(latitude + newLongitude + longitudeAdjust) + sinLat * sinLongSum);
}
else // if (!leftRightPass)
{
cameraNum = cameraNumBase + (latitude >= 0.0f ? 1 : 0);
float latitudeAdjust = (latitude >= 0.0f ? vFovAdjust : -vFovAdjust);
float sinLatAdjust, cosLatAdjust;
sincos(latitudeAdjust, sinLatAdjust, cosLatAdjust);
// Equivalent to:
// textureRayDirAdjusted = Quaternion.Euler(latitudeAdjust * 360.0f / (2 * pi), 0.0f, 0.0f) * textureRayDir;
textureRayDirAdjusted = float3(cosLat * sinNewLong,
cosLatAdjust * sinLat - cosLat * cosNewLong * sinLatAdjust,
sinLatAdjust * sinLat + cosLat * cosNewLong * cosLatAdjust);
}
// Perspective-project the adjusted ray into the selected camera's frustum.
u = textureRayDirAdjusted.x / textureRayDirAdjusted.z / tanHalfHFov;
v = -textureRayDirAdjusted.y / textureRayDirAdjusted.z / tanHalfVFov;
// NOTE(review): as above, this return abandons the whole output pixel when
// the ray falls outside the camera frustum - confirm the CPU-side slicing
// guarantees coverage by another dispatch.
if (! (textureRayDirAdjusted.z > 0.0f && u * u <= 1.0f && v * v <= 1.0f) )
return;
u = (u + 1.0f) * 0.5f;
v = (v + 1.0f) * 0.5f;
// GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
u *= cameraWidth;
v *= cameraHeight;
uint left = (uint)floor(u);
uint right = min(cameraWidth - 1, left + 1);
uint top = (uint)floor(v);
uint bottom = min(cameraHeight - 1, top + 1);
float uFrac = frac(u);
float vFrac = frac(v);
uint baseIdx = cameraNum * cameraWidth * cameraHeight;
uint topRow = baseIdx + top * cameraWidth;
uint bottomRow = baseIdx + bottom * cameraWidth;
uint topLeft = cameraPixels[topRow + left ];
uint topRight = cameraPixels[topRow + right];
uint bottomLeft = cameraPixels[bottomRow + left ];
uint bottomRight = cameraPixels[bottomRow + right];
// Bilinear blend of the packed 0xRRGGBB texels, per channel.
float r = lerp(lerp( topLeft >> 16 , bottomLeft >> 16 , vFrac),
lerp( topRight >> 16 , bottomRight >> 16 , vFrac), uFrac);
float g = lerp(lerp((topLeft >> 8) & 0xFF, (bottomLeft >> 8) & 0xFF, vFrac),
lerp((topRight >> 8) & 0xFF, (bottomRight >> 8) & 0xFF, vFrac), uFrac);
float b = lerp(lerp( topLeft & 0xFF, bottomLeft & 0xFF, vFrac),
lerp( topRight & 0xFF, bottomRight & 0xFF, vFrac), uFrac);
float4 col = float4(r, g, b, 255.0f);
if (j == 0) color0 = col; else color1 = col;
}
// Blend the two circle-point samples, then fade in the cap sample near poles.
float4 c = lerp(color0, color1, frac(circlePointNumber));
if (colorCap.a > 0.0f && ipdScaleLerp > 0.0f)
c = lerp(c, colorCap, ipdScaleLerp);
totalColor += float4(c.r, c.g, c.b, 255.0f);
}
}
// Average the supersamples and pack into 0xRRGGBB; eye i writes its own
// height-sized band of the output buffer.
totalColor /= ssaaFactor * ssaaFactor;
result[((dtid.y + equirectangularHeight * i) * equirectangularWidth) + dtid.x] =
((uint)totalColor.r << 16) | ((uint)totalColor.g << 8) | (uint)totalColor.b;
}

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 3e480fbf28fbc684f8fe9ab2cef9dae1
timeCreated: 1438073809
licenseType: Store
ComputeShaderImporter:
currentBuildTarget: 5
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,30 @@
// Each #kernel tells which function to compile; you can have many kernels
// Copies a texture into a structured buffer of packed 0xRRGGBB uints,
// applying gamma correction on the way.
#pragma kernel TextureToBuffer
Texture2D<float4> source; // Input texture to copy
RWStructuredBuffer<uint> result; // Output buffer of packed 0xRRGGBB texels
RWStructuredBuffer<uint> forceWaitResultBuffer; // Written by the last thread so the CPU can block until the dispatch completes
uint width, height; // Dimensions of source in texels
SamplerState MyPointRepeatSampler; // Point sampler used to fetch exact texel centers
uint startIdx;      // Offset into result where this texture's pixels begin
uint sentinelIdx;   // Index in result receiving the completion sentinel
uint forceWaitValue; // Value written to forceWaitResultBuffer[0] on completion
float gamma;        // Exponent applied per channel before scaling to bytes
[numthreads(32,32,1)] // Must match threadsX, threadsY in CapturePanorama.cs
// Copies the source texture into result as packed 0xRRGGBB values, one
// thread per texel, applying gamma correction. The final thread also writes
// the CPU-visible completion markers.
void TextureToBuffer (uint3 id : SV_DispatchThreadID)
{
    // Dispatches are rounded up to whole thread groups; discard threads
    // that fall outside the texture.
    if (id.x >= width || id.y >= height)
        return;
    // Fetch the texel center with the point sampler (no filtering).
    float2 texelCenter = float2(((float)id.x + 0.5) / width, ((float)id.y + 0.5) / height);
    float4 color = source.SampleLevel(MyPointRepeatSampler, texelCenter, 0);
    // Gamma-correct each channel, then scale to byte range.
    color = pow(color, gamma);
    color *= 255.0;
    // Pack R, G, B into one uint and store at this texel's slot.
    uint packedTexel = ((int)color.r << 16u) | ((int)color.g << 8u) | (int)color.b;
    result[startIdx + (id.y * width) + id.x] = packedTexel;
    // The last texel's thread signals completion to the CPU.
    if (id.x == width - 1u && id.y == height - 1u && id.z == 0u)
    {
        forceWaitResultBuffer[0] = forceWaitValue; // Used on CPU side to force a wait for this operation to complete
        result[sentinelIdx] = 1419455993u; // Sentinel value - must match BufferSentinelValue in CapturePanorama.cs
    }
}

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: d1cd5a67a32f22748a935ca069e561c0
timeCreated: 1438086409
licenseType: Store
ComputeShaderImporter:
currentBuildTarget: 5
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 01a5855c27b89474b989c86d9d005661
folderAsset: yes
timeCreated: 1438621068
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,22 @@
Panorama Name=
Capture Key=P
Image Format=PNG
Capture Stereoscopic=false
Interpupillary Distance=0.0635
Num Circle Points=128
Panorama Width=8192
Anti Aliasing=8
Ssaa Factor=1
Save Image Path=
Save Cubemap=false
Upload Images=false
Use Default Orientation=false
Use Gpu Transform=true
Cpu Milliseconds Per Frame=8.33333
Capture Every Frame=false
Frame Rate=30
Max Frames To Record=
Frame Number Digits=6
Fade During Capture=true
Fade Time=0.25
Enable Debugging=false

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e752742b4f609e8469983b80636c062c
timeCreated: 1437749012
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: d5fc87615e05da54a956512369366f6b
folderAsset: yes
timeCreated: 1438621051
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

BIN
Assets/CapturePanorama/Managed Plugins/System.Drawing.dll (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,20 @@
fileFormatVersion: 2
guid: ab398dea78c472c4688bea711055aa56
timeCreated: 1433641128
licenseType: Store
PluginImporter:
serializedVersion: 1
iconMap: {}
executionOrder: {}
isPreloaded: 0
platformData:
Any:
enabled: 1
settings: {}
Editor:
enabled: 0
settings:
DefaultValueInitialized: true
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: ff9f323c0a0c77f46aa25cf5b93e45b7
folderAsset: yes
timeCreated: 1438620635
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Binary file not shown.

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e642b458f4453e04ab7e2b711c58db98
timeCreated: 1432201072
licenseType: Store
NativeFormatImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,192 @@
Unity Script: 360 Panorama Capture
Version 1.3 - 2015 August 2 (Unity 5.1.2f1)
Captures a 360-degree panorama of the player's in-game surroundings and saves/uploads it for later viewing.
Requirements: This plugin currently requires Unity 5.x and a system supporting compute shaders. On PC, compute shaders require DirectX 11, Windows Vista or later, and a recent GPU capable of Shader Model 5.0.
CAPTURING 360 IMAGES
--------------------
1. Create an empty game object and add the Capture Panorama script (CapturePanorama.cs) to it.
2. Under Edit->Project Settings->Player->Other Settings->Optimization, set "Api Compatibility Level" from ".NET 2.0 Subset" to ".NET 2.0". If you don't do this, the script will work in editor but not in builds.
3. If your application is a VR application using the old Oculus VR plugin, uncomment the line "#define OVR_SUPPORT" at the top of CapturePanorama.cs. If you are using Unity native VR support (with or without Oculus Utils), this is unnecessary.
4. Run your application. Press P to capture a panorama. A sound will play and the screen will fade to black. When it completes, a second sound will play and an 8192x4096 PNG file will be saved in the application directory. You can capture programmatically with CaptureScreenshotAsync().
5. When you're ready, check the "Upload image" property to automatically upload all screenshots to the VRCHIVE panorama sharing website (http://alpha.vrchive.org).
If the procedure does not complete as expected, check the "Enable Debugging" property on the Capture Panorama script, build and run the application, and then send the resulting image if any and "output_log.txt" file from your data directory to the developer (eVRydayVR@gmail.com).
RECORDING 360 VIDEOS
--------------------
As of version 1.2, the "Capture Every Frame" option can be used to create 360 videos suitable for uploading to providers such as YouTube and Vrideo, or for viewing in Virtual Desktop or Gear VR. Steps follow.
Preparation:
* Make sure your application is able to run correctly when "Time.captureFramerate" is set to your desired video frame rate. Modify code which depends on the real clock time, e.g. waiting for a certain amount of time to pass, or waiting for audio events to complete, to instead use Time.deltaTime in Update().
* If you wish to capture gameplay and do not want to play the game at reduced frame rate during recording, implementing a replay system is recommended. This will also be useful for capturing the audio below.
Capture:
We will capture an image file for each frame of the video.
1. Check "Capture Every Frame" and enter the desired frame rate for capture in "Frame Rate".
2. The "BMP" image format provides the fastest capture. "PNG" will use less disk space.
3. Set "Save Image Path" to a fast disk with sufficient capacity for the raw video frames.
4. Set "Panorama Width" to the desired width of your video. Test your playback environment to ensure it supports the video size. Typically mono uses 4096 or 3840 and stereo uses 2048, 2160, 2880, or 3048.
5. Enable "Use Gpu Transform". Disable "Save Cubemap" unless you want cube map images for each frame.
6. For highest quality with slower encoding, increase "Ssaa Factor" to 2, 3, or 4.
7. Start the application and use the capture hotkey ("P" by default) to toggle between capturing and not capturing. You can also programmatically call StartCaptureEveryFrame() and StopCaptureEveryFrame().
8. Run the same scene again at normal speed and use any recording software to separately record the audio.
Creating video:
Install ffmpeg, add it to your path, and save the included asset "assemble.cmd" batch file with your image sequence.
Example invocation of this script from the command line:
assemble Test_2015-07-24_06-42-00-045_ bmp test.mp4 60 18 ultrafast
The parameters are:
* The prefix and extension of each filename (e.g. in this case the filenames were of the form "Test_2015-07-24_06-42-00-045_000001.bmp");
* The output filename;
* The output frame rate (should match the frame rate set during capture);
* The CRF quality, typical range 14 for very good with large file to 24 for mediocre with small file;
* The encoding preset which trades off encoding time and quality, with "ultrafast" producing the largest file the most quickly. This also avoids certain encoding failures.
Alternatively, in Adobe Premiere Pro you can import the sequence directly using these instructions:
https://helpx.adobe.com/premiere-pro/using/importing-still-images.html#import_numbered_still_image_sequences_as_video_clips
Once the video is created, add the audio track and edit as desired. For viewing in Virtual Desktop on Windows 8, be sure to render the final video at H.264 Level 5.1. For viewing on Gear VR, ensure no dimension exceeds 2160.
Encoding during capture will be added in future versions.
REFERENCE
---------
Properties on the Capture Panorama script:
* Panorama Name: Used as the prefix of the saved image filename. If "Upload Images" is enabled, this will appear in the title of the image on the web.
* Quality Setting: Selects which of the quality settings (under Project Settings->Quality) to use during capture. Highest is recommended. Default is to use the player's current setting.
* Capture Key (default "P"): the key to press to capture a 360 screenshot. If you wish to handle your own input, set this to "None" and invoke the CaptureScreenshotAsync() method from your script. If "Capture Every Frame" is enabled, this starts and stops capturing of the image sequence.
* Image Format (default PNG): Determines what format(s) to save/upload the image file in. JPEG produces smaller filesize but is much lower quality. BMP is faster to save than PNG but larger.
* Capture Stereoscopic (default false): Captures a top/bottom (over/under) image suitable for stereoscopic (3D) viewing. May produce artifacts. Be sure to set Panorama Width, Interpupillary Distance, and Num Circle Points appropriately when enabling this option.
* Interpupillary Distance (stereoscopic only): Distance between the eye pupils of the viewer in Unity units. Defaults to average IPD in meters from U.S. Army survey.
* Num Circle Points (at least 8, stereoscopic only): Determines at how many points to capture the surroundings. Smaller values are faster while larger values reduce ghosting/doubling artifacts on nearby objects. A good starting point is Panorama Width divided by 32. Smaller values also counterintuitively require more graphics memory, while larger values require less.
* Panorama Width (between 4 and 23800, default 8192): Determines width of the resulting panorama image. Height of the image will be half this in mono mode, or equal to this in stereo mode. Typical reasonable values are 4096 and 8192. Need not be a power of two. If this is too large, black bars may appear in output images, indicating graphics memory has been exhausted.
* Anti Aliasing (default 8): Sets the MSAA anti-aliasing quality to use during rendering. Set to 1 if using deferred rendering.
* Ssaa Factor (default 1): Set to a larger value such as 2, 3, or 4 to render at a higher resolution and then downsample to produce the final image. Produces superior anti-aliasing at a large performance cost. In stereoscopic mode, Ssaa Factor > 1 uses more graphics memory.
* Save Image Path: Directory where screenshots will be saved. If blank, the root application directory will be used.
* Save Cubemap (default off): Check to save the six captured cubemap images to disk. Some viewing software can view these directly. Will increase capture time.
In stereoscopic mode this option will save all captured camera images (stereo cubemaps are not yet supported). The images saved will be first the bottom and top images, then for each circle point it will save 2 images, one turned left 45 degrees and one turned right 45 degrees. Then again for each circle point it will save 2 images, one turned up 45 degrees and one turned down 45 degrees. The viewing circle has diameter equal to IPD; the points are equally distributed starting at Z positive.
* Upload Images (default off): Check to automatically publish panorama screenshots to VRCHIVE for sharing with others immediately after taking them. Visit alpha.vrchive.com to view them. Panoramas are currently uploaded anonymously (not under a user account).
* Use Default Orientation (default off): Resets the camera to the default (directly forward) rotation/orientation before taking the screenshot. May interfere with correct compositing if you have multiple cameras with different rotations. In VR applications, this is usually unnecessary because the headset orientation is used instead to correct the camera orientation.
* Use Gpu Transform (default on): Specifies whether to use the fast GPU-based shader to convert the captured cubemap to the final equirectangular image.
* Cpu Milliseconds Per Frame: When "Use Gpu Transform" is disabled, this will determine the number of CPU milliseconds to spend each frame on processing the panorama.
* Capture Every Frame (default off): When enabled, the Capture Key will start and stop the capturing of every frame to an image sequence.
* Frame Rate (default 30): Sets the frame rate used during capturing when Capture Every Frame is enabled. Determines what Time.captureFramerate will be set to.
* Max Frames To Record: If nonzero, will automatically stop after capturing this many frames.
* Frame Number Digits: When Capture Every Frame is enabled, this determines the number of digits to use for the frame number in the filenames of the image sequence (default 6). If these digits are exceeded, more digits will be used as needed.
* Start Sound: The sound played at the beginning of panorama processing. May be None.
* Done Sound: The sound played at the end of panorama processing. May be None.
* Fade During Capture: Whether to fade the screen to a solid color during capture. Helps to reduce simulator sickness, especially if Panorama Width is large.
* Fade Time: How quickly to fade in/out. Affects total time needed for capture. A value of zero is not currently supported.
* Fade Color: Solid color to fade the screen to during capture.
* Fade Material: Material that will be placed in front of the camera during fade.
* Enable Debugging: Shows debugging logs and time-to-capture on the console.
CONFIG FILE
-----------
The ReadPanoConfig script allows users to modify panorama capture parameters in a build without modifying the source Unity project. To use it:
1. Add the ReadPanoConfig.cs script to the same object as the CapturePanorama.cs script.
2. After building, the first time the application is run, it will create "CapturePanorama.ini" in the data directory of the build, with settings initially equal to the settings specified in the Inspector for the Capture Panorama script.
3. To modify settings, modify the .ini file and restart the application.
DEVELOPMENT NOTES
-----------------
To extend the tool as needed for your application, you can subclass CapturePanorama.CapturePanorama and override virtual methods:
* OnCaptureStart(): Called at the very beginning of each capture
* GetCaptureCameras(): Allows you to control the set of cameras rendered to produce the view
* BeforeRenderPanorama(): Called right before rendering camera views
* AfterRenderPanorama(): Called right after rendering camera views
You can also provide your own MonoBehavior event handlers such as Start(), Update(), Awake(), etc. and then have them call the superclass version using "base.Start()", "base.Update()" etc. The use of subclassing will ease upgrading when future versions of the script are released.
In scenes using the OVR plugin, the left eye will be used by default as the point of rendering.
The package supports scenes with multiple cameras or OVR camera rigs, each with different culling masks. They will be composited based on depth to reproduce the player's view.
In some cases the stereo camera may clip into surrounding objects. Reduce near clip or move the camera farther away to alleviate this.
As of version 1.3 camera image effects will be reproduced.
If you need to determine if a panorama capture is in process (e.g. to wait for the capture to complete), you can check the "Capturing" property.
CREDITS
-------
Developed by D Coetzee of eVRydayVR: http://youtube.com/user/eVRydayVR
Funded by the panorama repository VRCHIVE: http://vrchive.com
Default sound effects Clicks_13, Xylo_13, and DistClickBlocked1 from:
Free SFX Package - Bleep Blop Audio
https://www.assetstore.unity3d.com/en/#!/content/5178
LICENSE
-------
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8ccb1bc2c1e41924f964f63490ad918a
timeCreated: 1433619554
licenseType: Store
TextScriptImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 7ff819719ed240a48b07d539f2657cca
folderAsset: yes
timeCreated: 1438620459
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,93 @@
// This is free and unencumbered software released into the public domain.
// For more information, please refer to <http://unlicense.org/>
using System;
using UnityEngine;
// Based on http://blog.andreaskahler.com/2009/06/creating-icosphere-mesh-in-code.html
// Currently unused but planned for use with future features like seam-free mono capture.
namespace CapturePanorama
{
    /// <summary>
    /// Builds icosphere meshes by subdividing a regular icosahedron.
    /// Based on http://blog.andreaskahler.com/2009/06/creating-icosphere-mesh-in-code.html
    /// Currently unused but planned for use with future features like seam-free mono capture.
    /// </summary>
    public static class Icosphere
    {
        // Builds an icosphere of the given radius by refining an icosahedron
        // `iterations` times. (Refine is not yet implemented.)
        public static Mesh BuildIcosphere(float radius, int iterations)
        {
            Mesh mesh = BuildIcosahedron(radius);
            for (int step = 0; step < iterations; step++)
                Refine(mesh);
            return mesh;
        }

        // Builds a regular icosahedron; `radius` is the distance from the origin
        // to each vertex.
        public static Mesh BuildIcosahedron(float radius)
        {
            // The 12 icosahedron vertices are cyclic permutations of (0, ±1, ±t),
            // where t is the golden ratio.
            float t = (float)((1.0 + Math.Sqrt(5.0)) / 2.0);
            Vector3[] corners = new Vector3[]
            {
                new Vector3(-1.0f,  t,     0.0f),
                new Vector3( 1.0f,  t,     0.0f),
                new Vector3(-1.0f, -t,     0.0f),
                new Vector3( 1.0f, -t,     0.0f),
                new Vector3( 0.0f, -1.0f,  t),
                new Vector3( 0.0f,  1.0f,  t),
                new Vector3( 0.0f, -1.0f, -t),
                new Vector3( 0.0f,  1.0f, -t),
                new Vector3( t,     0.0f, -1.0f),
                new Vector3( t,     0.0f,  1.0f),
                new Vector3(-t,     0.0f, -1.0f),
                new Vector3(-t,     0.0f,  1.0f),
            };

            // Rescale so every vertex lies exactly `radius` from the origin.
            float scale = radius / new Vector3(1.0f, t, 0.0f).magnitude;
            for (int idx = 0; idx < corners.Length; idx++)
                corners[idx] = corners[idx] * scale;

            Mesh mesh = new Mesh();
            mesh.vertices = corners;
            mesh.triangles = new int[]
            {
                // 5 faces around point 0
                0, 11, 5,   0, 5, 1,   0, 1, 7,   0, 7, 10,   0, 10, 11,
                // 5 adjacent faces
                1, 5, 9,   5, 11, 4,   11, 10, 2,   10, 7, 6,   7, 1, 8,
                // 5 faces around point 3
                3, 9, 4,   3, 4, 2,   3, 2, 6,   3, 6, 8,   3, 8, 9,
                // 5 adjacent faces
                4, 9, 5,   2, 4, 11,   6, 2, 10,   8, 6, 7,   9, 8, 1,
            };
            return mesh;
        }

        // Subdivision step; intentionally unimplemented for now.
        private static void Refine(Mesh m)
        {
            throw new Exception("TODO");
        }
    }
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 0c30b5cc0105feb4a97840530aece901
timeCreated: 1437825051
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,83 @@
// This is free and unencumbered software released into the public domain.
// For more information, please refer to <http://unlicense.org/>
using System;
using System.Collections.Generic;
using System.Reflection;
using UnityEngine;
namespace CapturePanorama.Internals
{
// Replays another camera's image-effect chain (each component's OnRenderImage)
// on this camera's rendered frames, so a copy camera produces the same
// post-processed output as the camera it was copied from.
class ImageEffectCopyCamera : MonoBehaviour
{
// One captured image effect: the component instance plus its reflected
// OnRenderImage(RenderTexture, RenderTexture) method.
public struct InstanceMethodPair {
public object Instance;
public MethodInfo Method;
}
// Effects to apply in order; expected to be populated by the owner,
// typically from GenerateMethodList().
public List<InstanceMethodPair> onRenderImageMethods = new List<InstanceMethodPair>();
// Collects an (instance, method) pair for every enabled MonoBehaviour on
// camToCopy's GameObject that declares OnRenderImage(RenderTexture, RenderTexture).
// Reflection with NonPublic is needed because Unity message methods are
// conventionally private.
public static List<InstanceMethodPair> GenerateMethodList(Camera camToCopy)
{
var result = new List<InstanceMethodPair>();
foreach (var script in camToCopy.gameObject.GetComponents<MonoBehaviour>())
{
if (script.enabled)
{
Type scriptType = script.GetType();
MethodInfo m = scriptType.GetMethod("OnRenderImage",
BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, null,
new Type[] { typeof(RenderTexture), typeof(RenderTexture) }, null);
if (m != null)
{
InstanceMethodPair pair = new InstanceMethodPair();
pair.Instance = script;
pair.Method = m;
result.Add(pair);
}
}
}
return result;
}
// Two ping-pong buffers used between consecutive effects; lazily
// (re)allocated in OnRenderImage and released in OnDestroy.
RenderTexture[] temp = new RenderTexture[] { null, null };
void OnDestroy()
{
for (int i = 0; i < temp.Length; i++)
{
if (temp[i] != null)
Destroy(temp[i]);
temp[i] = null;
}
}
// Chains the captured effects: src -> temp[0] -> temp[1] -> ... -> dest,
// alternating between the two temp buffers.
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
int desiredDepth = Math.Max(src.depth, dest.depth);
for (int i = 0; i < temp.Length; i++)
{
// temp[i] is only needed when there are at least i + 2 effects
// (one intermediate buffer per gap between consecutive effects).
if (onRenderImageMethods.Count > i + 1)
{
// Recreate the buffer if its size/depth/format no longer matches.
if (temp[i] != null &&
(temp[i].width != dest.width || temp[i].height != dest.height || temp[i].depth != desiredDepth || temp[i].format != dest.format))
{
Destroy(temp[i]);
temp[i] = null;
}
if (temp[i] == null)
temp[i] = new RenderTexture(dest.width, dest.height, desiredDepth, dest.format);
}
}
// Build the texture sequence each effect reads from / writes to.
var sequence = new List<RenderTexture>();
sequence.Add(src);
for (int i = 0; i < onRenderImageMethods.Count - 1; i++)
sequence.Add(i % 2 == 0 ? temp[0] : temp[1]);
sequence.Add(dest);
// Invoke each effect with its (input, output) pair, in component order.
for (int i = 0; i < onRenderImageMethods.Count; i++)
onRenderImageMethods[i].Method.Invoke(onRenderImageMethods[i].Instance, new object[] { sequence[i], sequence[i + 1] });
}
}
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 89b4b90d697c75942976465b0500b667
timeCreated: 1438559789
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,120 @@
// This is free and unencumbered software released into the public domain.
// For more information, please refer to <http://unlicense.org/>
using System;
using System.IO;
using UnityEngine;
namespace CapturePanorama
{
    using System.Globalization;

    /// <summary>
    /// Loads CapturePanorama settings from an INI file at startup (builds only),
    /// letting users tweak capture parameters without rebuilding the project.
    /// If the file does not exist, it is created from the current Inspector values.
    /// </summary>
    public class ReadPanoConfig : MonoBehaviour
    {
        // Optional explicit path to the INI file. When empty,
        // "<Application.dataPath>/CapturePanorama.ini" is used.
        public string iniPath;

        void Start()
        {
            // Only builds read the INI; the editor always uses Inspector values.
            if (Application.isEditor)
                return;

            CapturePanorama pano = GetComponent<CapturePanorama>();

            string path = iniPath;
            if (path == "")
            {
                string filename = "CapturePanorama.ini";
                path = Application.dataPath + "/" + filename;
            }

            if (!File.Exists(path))
            {
                // INI file does not exist, creating instead
                WriteConfig(path, pano);
                return;
            }

            foreach (string line in File.ReadAllLines(path))
            {
                if (line.Trim() == "")
                    continue;
                // Split on the first '=' only, so values may contain '=' themselves.
                string[] splitLine = line.Split(new char[] { '=' }, 2);
                if (splitLine.Length < 2)
                {
                    // Previously this indexed splitLine[1] unconditionally and threw
                    // IndexOutOfRangeException on lines without '='.
                    Debug.LogError("Malformed line (missing '=') in CapturePanorama.ini: " + line);
                    continue;
                }
                string key = splitLine[0].Trim();
                string val = splitLine[1].Trim();
                // Floating-point values are parsed with the invariant culture so the
                // INI always uses '.' as decimal separator, regardless of OS locale
                // (e.g. "8.33333" would mis-parse under a comma-decimal culture).
                if (key == "Panorama Name")
                    pano.panoramaName = val;
                else if (key == "Capture Key")
                    pano.captureKey = (KeyCode)Enum.Parse(typeof(KeyCode), val);
                else if (key == "Image Format")
                    pano.imageFormat = (CapturePanorama.ImageFormat)Enum.Parse(typeof(CapturePanorama.ImageFormat), val);
                else if (key == "Capture Stereoscopic")
                    pano.captureStereoscopic = bool.Parse(val);
                else if (key == "Interpupillary Distance")
                    pano.interpupillaryDistance = float.Parse(val, CultureInfo.InvariantCulture);
                else if (key == "Num Circle Points")
                    pano.numCirclePoints = int.Parse(val);
                else if (key == "Panorama Width")
                    pano.panoramaWidth = int.Parse(val);
                else if (key == "Anti Aliasing")
                    pano.antiAliasing = (CapturePanorama.AntiAliasing)int.Parse(val);
                else if (key == "Ssaa Factor")
                    pano.ssaaFactor = int.Parse(val);
                else if (key == "Save Image Path")
                    pano.saveImagePath = val;
                else if (key == "Save Cubemap")
                    pano.saveCubemap = bool.Parse(val);
                else if (key == "Upload Images")
                    pano.uploadImages = bool.Parse(val);
                else if (key == "Use Default Orientation")
                    pano.useDefaultOrientation = bool.Parse(val);
                else if (key == "Use Gpu Transform")
                    pano.useGpuTransform = bool.Parse(val);
                else if (key == "Cpu Milliseconds Per Frame")
                    pano.cpuMillisecondsPerFrame = (float)double.Parse(val, CultureInfo.InvariantCulture);
                else if (key == "Capture Every Frame")
                    pano.captureEveryFrame = bool.Parse(val);
                else if (key == "Frame Rate")
                    pano.frameRate = int.Parse(val);
                else if (key == "Max Frames To Record")
                    // Blank means "no limit" (0).
                    pano.maxFramesToRecord = val == "" ? 0 : int.Parse(val);
                else if (key == "Frame Number Digits")
                    pano.frameNumberDigits = int.Parse(val);
                else if (key == "Fade During Capture")
                    pano.fadeDuringCapture = bool.Parse(val);
                else if (key == "Fade Time")
                    pano.fadeTime = float.Parse(val, CultureInfo.InvariantCulture);
                else if (key == "Enable Debugging")
                    pano.enableDebugging = bool.Parse(val);
                else
                    Debug.LogError("Unrecognized key in line in CapturePanorama.ini: " + line);
            }
        }

        // Writes the current Inspector settings of `pano` to a new INI file at `path`.
        // Floats are written with the invariant culture so the file round-trips
        // through Start() on any locale.
        private void WriteConfig(string path, CapturePanorama pano)
        {
            using (var writer = new StreamWriter(path))
            {
                writer.WriteLine("Panorama Name" + "=" + pano.panoramaName);
                writer.WriteLine("Capture Key" + "=" + pano.captureKey);
                writer.WriteLine("Image Format" + "=" + pano.imageFormat);
                writer.WriteLine("Capture Stereoscopic" + "=" + pano.captureStereoscopic);
                writer.WriteLine("Interpupillary Distance" + "=" + pano.interpupillaryDistance.ToString(CultureInfo.InvariantCulture));
                writer.WriteLine("Num Circle Points" + "=" + pano.numCirclePoints);
                writer.WriteLine("Panorama Width" + "=" + pano.panoramaWidth);
                writer.WriteLine("Anti Aliasing" + "=" + (int)pano.antiAliasing);
                writer.WriteLine("Ssaa Factor" + "=" + pano.ssaaFactor);
                writer.WriteLine("Save Image Path" + "=" + pano.saveImagePath);
                writer.WriteLine("Save Cubemap" + "=" + pano.saveCubemap);
                writer.WriteLine("Upload Images" + "=" + pano.uploadImages);
                writer.WriteLine("Use Default Orientation" + "=" + pano.useDefaultOrientation);
                writer.WriteLine("Use Gpu Transform" + "=" + pano.useGpuTransform);
                writer.WriteLine("Cpu Milliseconds Per Frame" + "=" + pano.cpuMillisecondsPerFrame.ToString(CultureInfo.InvariantCulture));
                writer.WriteLine("Capture Every Frame" + "=" + pano.captureEveryFrame);
                writer.WriteLine("Frame Rate" + "=" + pano.frameRate);
                writer.WriteLine("Max Frames To Record" + "=" + pano.maxFramesToRecord);
                writer.WriteLine("Frame Number Digits" + "=" + pano.frameNumberDigits);
                writer.WriteLine("Fade During Capture" + "=" + pano.fadeDuringCapture);
                writer.WriteLine("Fade Time" + "=" + pano.fadeTime.ToString(CultureInfo.InvariantCulture));
                writer.WriteLine("Enable Debugging" + "=" + pano.enableDebugging);
            }
        }
    }
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fa15e3b423ebd7c438d5e7c5c9e60a2a
timeCreated: 1437619670
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,32 @@
// This is free and unencumbered software released into the public domain.
// For more information, please refer to <http://unlicense.org/>
using UnityEngine;
namespace CapturePanorama.Internals
{
    /// <summary>
    /// After the camera renders, draws a full-screen quad with fadeMaterial,
    /// fading the player's view to the material's color during capture.
    /// Based on OVRScreenFade.
    /// </summary>
    public class ScreenFadeControl : MonoBehaviour
    {
        // Material drawn over the entire screen; its color drives the fade.
        public Material fadeMaterial = null;

#if UNITY_ANDROID && !UNITY_EDITOR
        void OnCustomPostRender()
#else
        void OnPostRender()
#endif
        {
            fadeMaterial.SetPass(0);
            GL.PushMatrix();
            GL.LoadOrtho();
            GL.Color(fadeMaterial.color);
            GL.Begin(GL.QUADS);
            // Full-screen quad in ortho space; z = -12 keeps it in front of the scene.
            Vector3[] quadCorners = new Vector3[]
            {
                new Vector3(0f, 0f, -12f),
                new Vector3(0f, 1f, -12f),
                new Vector3(1f, 1f, -12f),
                new Vector3(1f, 0f, -12f),
            };
            foreach (Vector3 corner in quadCorners)
                GL.Vertex3(corner.x, corner.y, corner.z);
            GL.End();
            GL.PopMatrix();
        }
    }
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d523dbe145a52c64aaf87e2287a4de1b
timeCreated: 1432449878
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 05d29e2dd63ef2c46a252cfd09f922ba
folderAsset: yes
timeCreated: 1438620654
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,102 @@
// Upgrade NOTE: replaced 'mul(UNITY_MATRIX_MVP,*)' with 'UnityObjectToClipPos(*)'
/************************************************************************************
Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
http://www.oculusvr.com/licenses/LICENSE-3.2
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
// Unlit alpha-blended shader.
// - no lighting
// - no lightmap support
// - supports tint color
// Renders a textured, tinted, alpha-blended quad on top of everything
// (Queue Transparent+99, ZTest Always); used for the capture fade overlay.
Shader "Unlit/Transparent HUD (CapturePanorama)" {
Properties
{
_MainTex ("Base (RGB), Alpha (A)", 2D) = "white" {}
_Color ("Main Color", Color) = (0.5,0.5,0.5,0.5)
}
SubShader
{
LOD 100
Tags
{
"Queue" = "Transparent+99"
"IgnoreProjector" = "True"
"RenderType" = "Transparent"
}
// Draw both faces, unlit, always on top, without writing depth.
Cull Off
Lighting Off
ZTest Always
ZWrite Off
Fog { Mode Off }
Offset -1, -1
// Standard alpha blending.
Blend SrcAlpha OneMinusSrcAlpha
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Vertex input: position, UV, and per-vertex tint color.
struct appdata_t
{
float4 vertex : POSITION;
float2 texcoord : TEXCOORD0;
fixed4 color : COLOR;
};
struct v2f
{
float4 vertex : SV_POSITION;
half2 texcoord : TEXCOORD0;
fixed4 color : COLOR;
};
sampler2D _MainTex;
float4 _MainTex_ST;
fixed4 _Color;
// Transforms the vertex to clip space; passes UV (with tiling/offset applied)
// and vertex color through to the fragment stage.
v2f vert (appdata_t v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.texcoord = TRANSFORM_TEX(v.texcoord, _MainTex);
o.color = v.color;
return o;
}
// Output = texture sample modulated by vertex color and the material tint.
fixed4 frag (v2f i) : COLOR
{
fixed4 col = tex2D(_MainTex, i.texcoord) * i.color * _Color;
return col;
}
ENDCG
}
}
}

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: c4de1d28bf5f7a74bb80869d3b2a246c
timeCreated: 1433688734
licenseType: Store
ShaderImporter:
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: f4f64c593f9b3404cb2b045e707feb20
folderAsset: yes
timeCreated: 1438620619
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

BIN
Assets/CapturePanorama/Sounds/Clicks_13.wav (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,19 @@
fileFormatVersion: 2
guid: 3b7a828a646174088aa48ee1db9c439c
AudioImporter:
serializedVersion: 6
defaultSettings:
loadType: 1
sampleRateSetting: 0
sampleRateOverride: 0
compressionFormat: 0
quality: 0
conversionMode: 0
platformSettingOverrides: {}
forceToMono: 0
preloadAudioData: 1
loadInBackground: 0
3D: 1
userData:
assetBundleName:
assetBundleVariant:

BIN
Assets/CapturePanorama/Sounds/DistClickBlocked1.wav (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,20 @@
fileFormatVersion: 2
guid: b4ed406e6e315450f85fa2c0f65bce01
AudioImporter:
serializedVersion: 6
defaultSettings:
loadType: 1
sampleRateSetting: 0
sampleRateOverride: 0
compressionFormat: 0
quality: 0
conversionMode: 0
platformSettingOverrides: {}
forceToMono: 0
normalize: 1
preloadAudioData: 1
loadInBackground: 0
3D: 1
userData:
assetBundleName:
assetBundleVariant:

BIN
Assets/CapturePanorama/Sounds/Xylo_13.wav (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,19 @@
fileFormatVersion: 2
guid: a22b8c429f521429084d616c68dda19a
AudioImporter:
serializedVersion: 6
defaultSettings:
loadType: 1
sampleRateSetting: 0
sampleRateOverride: 0
compressionFormat: 0
quality: 0
conversionMode: 0
platformSettingOverrides: {}
forceToMono: 0
preloadAudioData: 1
loadInBackground: 0
3D: 1
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1 @@
rem Assembles a numbered image sequence into an H.264 MP4 using ffmpeg.
rem %1 = filename prefix, %2 = image extension, %3 = output filename,
rem %4 = frame rate, %5 = CRF quality, %6 = x264 encoding preset.
rem Filenames are expected as <prefix>NNNNNN.<ext> with 6-digit frame numbers.
ffmpeg -framerate %4 -i %1%%06d.%2 -an -c:v libx264 -r %4 -pix_fmt yuv420p -preset %6 -crf %5 %3

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: aadd2790d28a4fb46a82d27bd8ead3f6
timeCreated: 1437750855
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant: