pnperspectivefx normalize option

This commit is contained in:
shun-iwasawa 2021-02-13 10:00:13 +09:00 committed by Jeremy Bullock
parent 66d04c60f5
commit 376f05b966
5 changed files with 340 additions and 350 deletions

View file

@ -1191,6 +1191,7 @@
<item>"STD_iwa_PNPerspectiveFx.eyeLevel" "Eye Level"</item>
<item>"STD_iwa_PNPerspectiveFx.alpha_rendering" "Alpha Rendering"</item>
<item>"STD_iwa_PNPerspectiveFx.waveHeight" "Wave Height"</item>
<item>"STD_iwa_PNPerspectiveFx.normalize_fresnel" "Normalize Intensity"</item>
<item>"STD_iwa_SoapBubbleFx" "SoapBubble Iwa" </item>
<item>"STD_iwa_SoapBubbleFx.renderMode" "Render Mode" </item>

View file

@ -13,6 +13,16 @@
<control>fov</control>
<control>eyeLevel</control>
<control>alpha_rendering</control>
<vbox modeSensitive="renderMode" mode="2,3,4">
<control>waveHeight</control>
</vbox>
<vbox modeSensitive="renderMode" mode="3">
<control>normalize_fresnel</control>
<control>normalize_margin</control>
</vbox>
<visibleToggle>
<controller>normalize_fresnel</controller>
<on>normalize_margin</on>
</visibleToggle>
</page>
</fxlayout>

View file

@ -4,25 +4,25 @@
#define IWA_FRESNEL_H
/* Table of Fresnel reflectance (0° to 90°, in 1° steps) */
static float fresnel[91] = {
0.020059312f, 0.020059313f, 0.020059328f, 0.020059394f, 0.020059572f,
0.020059947f, 0.020060633f, 0.020061768f, 0.020063519f, 0.020066082f,
0.020069685f, 0.020074587f, 0.020081084f, 0.020089508f, 0.020100231f,
0.020113671f, 0.020130291f, 0.020150605f, 0.020175182f, 0.020204654f,
0.020239715f, 0.020281134f, 0.020329757f, 0.020386517f, 0.020452442f,
0.020528662f, 0.020616424f, 0.020717098f, 0.020832193f, 0.02096337f,
0.021112458f, 0.021281471f, 0.021472628f, 0.021688372f, 0.021931397f,
0.022204671f, 0.022511467f, 0.022855398f, 0.023240448f, 0.023671017f,
0.024151962f, 0.024688653f, 0.025287023f, 0.025953633f, 0.026695742f,
0.027521384f, 0.028439454f, 0.029459807f, 0.030593365f, 0.031852239f,
0.033249863f, 0.034801146f, 0.036522642f, 0.038432743f, 0.040551883f,
0.042902788f, 0.045510732f, 0.048403845f, 0.051613442f, 0.055174404f,
0.059125599f, 0.063510357f, 0.068377002f, 0.073779452f, 0.079777891f,
0.086439526f, 0.093839443f, 0.102061562f, 0.111199722f, 0.121358904f,
0.132656604f, 0.145224399f, 0.159209711f, 0.174777806f, 0.192114072f,
0.211426604f, 0.232949158f, 0.256944521f, 0.283708376f, 0.313573743f,
0.346916096f, 0.384159282f, 0.425782383f, 0.472327717f, 0.524410184f,
0.582728245f, 0.648076867f, 0.721362859f, 0.803623128f, 0.896046505f,
1.0f};
static double fresnel[91] = {
0.020059312, 0.020059313, 0.020059328, 0.020059394, 0.020059572,
0.020059947, 0.020060633, 0.020061768, 0.020063519, 0.020066082,
0.020069685, 0.020074587, 0.020081084, 0.020089508, 0.020100231,
0.020113671, 0.020130291, 0.020150605, 0.020175182, 0.020204654,
0.020239715, 0.020281134, 0.020329757, 0.020386517, 0.020452442,
0.020528662, 0.020616424, 0.020717098, 0.020832193, 0.020963370,
0.021112458, 0.021281471, 0.021472628, 0.021688372, 0.021931397,
0.022204671, 0.022511467, 0.022855398, 0.023240448, 0.023671017,
0.024151962, 0.024688653, 0.025287023, 0.025953633, 0.026695742,
0.027521384, 0.028439454, 0.029459807, 0.030593365, 0.031852239,
0.033249863, 0.034801146, 0.036522642, 0.038432743, 0.040551883,
0.042902788, 0.045510732, 0.048403845, 0.051613442, 0.055174404,
0.059125599, 0.063510357, 0.068377002, 0.073779452, 0.079777891,
0.086439526, 0.093839443, 0.102061562, 0.111199722, 0.121358904,
0.132656604, 0.145224399, 0.159209711, 0.174777806, 0.192114072,
0.211426604, 0.232949158, 0.256944521, 0.283708376, 0.313573743,
0.346916096, 0.384159282, 0.425782383, 0.472327717, 0.524410184,
0.582728245, 0.648076867, 0.721362859, 0.803623128, 0.896046505,
1.0};
#endif

View file

@ -1,6 +1,6 @@
/*------------------------------------------------------------
Iwa_PNPerspectiveFx
Generates a Perlin Noise / Simplex Noise pattern
render perspective noise pattern.
------------------------------------------------------------*/
#include "iwa_pnperspectivefx.h"
@ -19,32 +19,34 @@ namespace {
const double M_PI = 3.1415926535897932384626433832795;
#endif
/* returns the dot product */
inline float dot(float3 a, float3 b) {
inline double dot(double3 a, double3 b) {
return a.x * b.x + a.y * b.y + a.z * b.z;
}
/* returns the cross product */
inline float3 cross(float3 a, float3 b) {
float3 ret = {a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z,
inline double3 cross(double3 a, double3 b) {
double3 ret = {a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z,
a.x * b.y - a.y * b.x};
return ret;
}
/* normalizes the vector */
inline float3 normalize(float3 v) {
float length = sqrtf(v.x * v.x + v.y * v.y + v.z * v.z);
float3 ret = {v.x / length, v.y / length, v.z / length};
inline double3 normalize(double3 v) {
double length = std::sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
double3 ret = {v.x / length, v.y / length, v.z / length};
return ret;
}
double getFresnel(double deg) {
if (deg < 0.0) return 0.0;
if (deg >= 90.0) return 1.0;
int index = (int)std::floor(deg);
double ratio = deg - (double)index;
return fresnel[index] * (1.0 - ratio) + fresnel[index + 1] * ratio;
}
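Editor's illustration (not part of the commit): the new getFresnel() helper above linearly interpolates the 1-degree table. The standalone snippet below reproduces that blend for an example angle of 45.25 degrees, using the values at indices 45 and 46 of the fresnel[] table.
#include <cstdio>
int main() {
  // values copied from the fresnel[] table (indices 45 and 46)
  const double f45 = 0.027521384, f46 = 0.028439454;
  const double deg = 45.25;         // example query angle in degrees
  const double ratio = deg - 45.0;  // fractional part within the 1-degree step
  const double ref = f45 * (1.0 - ratio) + f46 * ratio;
  std::printf("fresnel(%.2f deg) ~= %f\n", deg, ref);  // prints ~0.027751
  return 0;
}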
/*------------------------------------------------------------
Convert to channel values and store
------------------------------------------------------------*/
} // namespace
//------------------------------------------------------------
template <typename RASTER, typename PIXEL>
void Iwa_PNPerspectiveFx::setOutputRaster(float4 *srcMem, const RASTER dstRas,
void Iwa_PNPerspectiveFx::setOutputRaster(double4 *srcMem, const RASTER dstRas,
TDimensionI dim, int drawLevel,
const bool alp_rend_sw) {
typename PIXEL::Channel halfChan =
@ -54,113 +56,118 @@ void Iwa_PNPerspectiveFx::setOutputRaster(float4 *srcMem, const RASTER dstRas,
dstRas->fill(PIXEL(halfChan, halfChan, halfChan, halfChan));
else
dstRas->fill(PIXEL(halfChan, halfChan, halfChan));
float4 *chan_p = srcMem;
double4 *chan_p = srcMem;
for (int j = 0; j < drawLevel; j++) {
PIXEL *pix = dstRas->pixels(j);
for (int i = 0; i < dstRas->getLx(); i++, chan_p++, pix++) {
float val;
val = (*chan_p).x * (float)PIXEL::maxChannelValue + 0.5f;
pix->r = (typename PIXEL::Channel)((val > (float)PIXEL::maxChannelValue)
? (float)PIXEL::maxChannelValue
double val;
val = (*chan_p).x * (double)PIXEL::maxChannelValue + 0.5;
pix->r = (typename PIXEL::Channel)((val > (double)PIXEL::maxChannelValue)
? (double)PIXEL::maxChannelValue
: val);
val = (*chan_p).y * (float)PIXEL::maxChannelValue + 0.5f;
pix->g = (typename PIXEL::Channel)((val > (float)PIXEL::maxChannelValue)
? (float)PIXEL::maxChannelValue
val = (*chan_p).y * (double)PIXEL::maxChannelValue + 0.5;
pix->g = (typename PIXEL::Channel)((val > (double)PIXEL::maxChannelValue)
? (double)PIXEL::maxChannelValue
: val);
val = (*chan_p).z * (float)PIXEL::maxChannelValue + 0.5f;
pix->b = (typename PIXEL::Channel)((val > (float)PIXEL::maxChannelValue)
? (float)PIXEL::maxChannelValue
val = (*chan_p).z * (double)PIXEL::maxChannelValue + 0.5;
pix->b = (typename PIXEL::Channel)((val > (double)PIXEL::maxChannelValue)
? (double)PIXEL::maxChannelValue
: val);
val = (*chan_p).w * (float)PIXEL::maxChannelValue + 0.5f;
pix->m = (typename PIXEL::Channel)((val > (float)PIXEL::maxChannelValue)
? (float)PIXEL::maxChannelValue
val = (*chan_p).w * (double)PIXEL::maxChannelValue + 0.5;
pix->m = (typename PIXEL::Channel)((val > (double)PIXEL::maxChannelValue)
? (double)PIXEL::maxChannelValue
: val);
}
}
}
/*------------------------------------------------------------
Obtain the Perlin Noise parameters
------------------------------------------------------------*/
//------------------------------------------------------------
// obtain parameters
void Iwa_PNPerspectiveFx::getPNParameters(TTile &tile, double frame,
const TRenderSettings &settings,
PN_Params &params,
TDimensionI &dimOut) {
/* obtain the operation parameters */
params.renderMode = m_renderMode->getValue();
params.noiseType = m_noiseType->getValue();
params.size = (float)m_size->getValue(frame);
/* multiply by a factor to match the density of Simplex Noise */
if (params.noiseType == 1) params.size *= 1.41421356f;
params.renderMode = (PN_Params::RenderMode)m_renderMode->getValue();
params.noiseType = (PN_Params::NoiseType)m_noiseType->getValue();
params.size = m_size->getValue(frame);
// adjust the Simplex Noise size so that its density matches Perlin Noise
if (params.noiseType == PN_Params::Simplex) params.size *= std::sqrt(2.0);
params.octaves = m_octaves->getValue() + 1;
params.offset = float2{(float)m_offset->getValue(frame).x,
(float)m_offset->getValue(frame).y};
params.p_intensity = (float)m_persistance_intensity->getValue(frame);
params.p_size = (float)m_persistance_size->getValue(frame);
params.p_offset = (float)m_persistance_offset->getValue(frame);
TPointD _eyeLevel = m_eyeLevel->getValue(frame);
params.eyeLevel = float2{(float)_eyeLevel.x, (float)_eyeLevel.y};
params.offset = m_offset->getValue(frame);
params.p_intensity = m_persistance_intensity->getValue(frame);
params.p_size = m_persistance_size->getValue(frame);
params.p_offset = m_persistance_offset->getValue(frame);
params.eyeLevel = m_eyeLevel->getValue(frame);
params.alp_rend_sw = m_alpha_rendering->getValue();
params.waveHeight = (float)m_waveHeight->getValue(frame);
params.waveHeight = m_waveHeight->getValue(frame);
const float fov = (float)m_fov->getValue(frame);
double fov = m_fov->getValue(frame);
TAffine aff = settings.m_affine;
const double scale = 1.0 / sqrt(fabs(aff.det()));
TAffine aff_pn = TScale(scale) * TTranslation(tile.m_pos);
double scale = 1.0 / std::sqrt(std::abs(aff.det()));
params.aff = TScale(scale) * TTranslation(tile.m_pos);
params.a11 = aff_pn.a11;
params.a12 = aff_pn.a12;
params.a13 = aff_pn.a13;
params.a21 = aff_pn.a21;
params.a22 = aff_pn.a22;
params.a23 = aff_pn.a23;
params.time = (float)m_evolution->getValue(frame) * 0.05;
params.p_evolution = (float)m_persistance_evolution->getValue(frame);
params.time = m_evolution->getValue(frame) * 0.05;
params.p_evolution = m_persistance_evolution->getValue(frame);
TPointD eyePoint =
aff * _eyeLevel - (tile.m_pos + tile.getRaster()->getCenterD());
const float eyeHeight = (float)eyePoint.y;
/* distance from the bottom of the render region */
params.drawLevel = (int)((float)dimOut.ly / 2.0f + eyeHeight);
aff * params.eyeLevel - (tile.m_pos + tile.getRaster()->getCenterD());
double eyeHeight = eyePoint.y;
// distance from the bottom of the render region to the eye level
params.drawLevel = (int)((double)dimOut.ly / 2.0 + eyeHeight);
if (params.drawLevel > dimOut.ly) params.drawLevel = dimOut.ly;
//------------------------------------------------------------
/* half of the camera's vertical size in mm */
int camHeight = settings.m_cameraBox.getLy();
TPointD vec_p0p1((double)camHeight * aff_pn.a12,
(double)camHeight * aff_pn.a22);
params.fy_2 = sqrtf(vec_p0p1.x * vec_p0p1.x + vec_p0p1.y * vec_p0p1.y) / 2.0f;
TPointD vec_p0p1((double)camHeight * params.aff.a12,
(double)camHeight * params.aff.a22);
params.fy_2 =
std::sqrt(vec_p0p1.x * vec_p0p1.x + vec_p0p1.y * vec_p0p1.y) / 2.0;
float fov_radian_2 = (fov / 2.0f) * float(M_PI_180);
double fov_radian_2 = (fov / 2.0) * M_PI_180;
/* distance from the camera to the projection plane */
float D = params.fy_2 / tanf(fov_radian_2);
/* distance from the camera to the horizon on the projection plane */
params.A = sqrtf(params.eyeLevel.y * params.eyeLevel.y + D * D);
// distance from the camera to the center of the projection plane
double D = params.fy_2 / std::tan(fov_radian_2);
// distance from the camera to the projected point of the horizon
params.A = std::sqrt(params.eyeLevel.y * params.eyeLevel.y + D * D);
/* angle between the horizon and the vector from the camera position to the bottom frame edge */
float theta = fov_radian_2 + asinf(params.eyeLevel.y / params.A);
// angle between the horizon and the vector from the camera center to the
// bottom of the camera frame
double theta = fov_radian_2 + std::asin(params.eyeLevel.y / params.A);
float M = params.fy_2 / sinf(fov_radian_2);
double M = params.fy_2 / std::sin(fov_radian_2);
params.cam_pos = float3{0.0f, -M * cosf(theta), M * sinf(theta)};
params.cam_pos = double3{0.0, -M * std::cos(theta), M * std::sin(theta)};
/* compute the base Fresnel reflectance */
params.base_fresnel_ref = 0.0f;
float phi = 90.0f - theta * 180.0f / M_PI;
if (phi >= 0.0f && phi < 90.0f) {
int index = (int)phi;
float ratio = phi - (float)index;
params.base_fresnel_ref =
fresnel[index] * (1.0f - ratio) + fresnel[index + 1] * ratio;
// compute the normalization range
params.base_fresnel_ref = 0.0;
params.top_fresnel_ref = 1.0;
if (params.renderMode == PN_Params::Fresnel &&
m_normalize_fresnel->getValue()) {
double phi = 90.0 - theta * M_180_PI;
params.base_fresnel_ref = getFresnel(phi);
// fresnel value at the upper corner of the camera frame
double fx_2 =
params.fy_2 * (double)settings.m_cameraBox.getLx() / (double)camHeight;
double side_A = std::sqrt(fx_2 * fx_2 + params.A * params.A);
double top_theta = -fov_radian_2 + std::asin(params.eyeLevel.y / side_A);
phi = 90.0 - top_theta * M_180_PI;
params.top_fresnel_ref = getFresnel(phi);
double marginRatio = m_normalize_margin->getValue(frame);
// add the margin
double margin =
(params.top_fresnel_ref - params.base_fresnel_ref) * marginRatio;
params.base_fresnel_ref = std::max(0.0, params.base_fresnel_ref - margin);
params.top_fresnel_ref = std::min(1.0, params.top_fresnel_ref + margin);
}
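Editor's note, a minimal sketch rather than code from the commit: the base/top range computed above is what the Fresnel render branch further down uses to remap the raw table reflectivity into [0, 1]. The helper name normalizeFresnel is hypothetical; the arithmetic mirrors that branch.
#include <algorithm>
// remap a raw Fresnel reflectivity into [0, 1] using the normalization range
double normalizeFresnel(double fresnel_ref, double base_fresnel_ref,
                        double top_fresnel_ref) {
  double ref =
      (fresnel_ref - base_fresnel_ref) / (top_fresnel_ref - base_fresnel_ref);
  return std::max(0.0, std::min(1.0, ref));  // clamp to [0, 1]
}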
/* compute the sum for intensity normalization */
float intensity = 2.0f; /* from -1 to 1 */
params.int_sum = 0.0f;
// for normalizing intensity
double intensity = 2.0; // from -1 to 1
params.int_sum = 0.0;
for (int o = 0; o < params.octaves; o++) {
params.int_sum += intensity;
intensity *= params.p_intensity;
@ -170,8 +177,8 @@ void Iwa_PNPerspectiveFx::getPNParameters(TTile &tile, double frame,
//------------------------------------------------------------
Iwa_PNPerspectiveFx::Iwa_PNPerspectiveFx()
: m_renderMode(new TIntEnumParam(0, "Noise"))
, m_noiseType(new TIntEnumParam(0, "Perlin Noise"))
: m_renderMode(new TIntEnumParam(PN_Params::Noise, "Noise"))
, m_noiseType(new TIntEnumParam(PN_Params::Perlin, "Perlin Noise"))
, m_size(10.0)
, m_evolution(0.0)
, m_octaves(new TIntEnumParam(0, "1"))
@ -183,7 +190,9 @@ Iwa_PNPerspectiveFx::Iwa_PNPerspectiveFx()
, m_fov(30)
, m_eyeLevel(TPointD(0, 0))
, m_alpha_rendering(true)
, m_waveHeight(10.0) {
, m_waveHeight(10.0)
, m_normalize_fresnel(false)
, m_normalize_margin(0.1) {
bindParam(this, "renderMode", m_renderMode);
bindParam(this, "noiseType", m_noiseType);
bindParam(this, "size", m_size);
@ -198,13 +207,15 @@ Iwa_PNPerspectiveFx::Iwa_PNPerspectiveFx()
bindParam(this, "eyeLevel", m_eyeLevel);
bindParam(this, "alpha_rendering", m_alpha_rendering);
bindParam(this, "waveHeight", m_waveHeight);
bindParam(this, "normalize_fresnel", m_normalize_fresnel);
bindParam(this, "normalize_margin", m_normalize_margin);
m_noiseType->addItem(1, "Simplex Noise");
m_noiseType->addItem(PN_Params::Simplex, "Simplex Noise");
m_renderMode->addItem(1, "Noise (no resampled)");
m_renderMode->addItem(2, "Warp HV offset");
m_renderMode->addItem(4, "Warp HV offset 2");
m_renderMode->addItem(3, "Fresnel reflectivity");
m_renderMode->addItem(PN_Params::Noise_NoResample, "Noise (no resampled)");
m_renderMode->addItem(PN_Params::WarpHV, "Warp HV offset");
m_renderMode->addItem(PN_Params::WarpHV2, "Warp HV offset 2");
m_renderMode->addItem(PN_Params::Fresnel, "Fresnel reflectivity");
m_size->setMeasureName("fxLength");
m_size->setValueRange(0.0, 1000.0);
@ -231,6 +242,7 @@ Iwa_PNPerspectiveFx::Iwa_PNPerspectiveFx()
m_waveHeight->setMeasureName("fxLength");
m_waveHeight->setValueRange(1.0, 100.0);
m_normalize_margin->setValueRange(0.0, 3.0);
}
//------------------------------------------------------------
@ -251,35 +263,33 @@ bool Iwa_PNPerspectiveFx::canHandle(const TRenderSettings &info, double frame) {
void Iwa_PNPerspectiveFx::doCompute(TTile &tile, double frame,
const TRenderSettings &settings) {
/* throw an error for unsupported pixel types */
if (!((TRaster32P)tile.getRaster()) && !((TRaster64P)tile.getRaster())) {
throw TRopException("unsupported input pixel type");
}
TDimensionI dimOut(tile.getRaster()->getLx(), tile.getRaster()->getLy());
/* Perlin Noise parameters */
// obtain parameters
PN_Params pnParams;
getPNParameters(tile, frame, settings, pnParams, dimOut);
/* return if the horizon is below the screen */
// return if the horizon is below the rendering area
if (pnParams.drawLevel < 0) {
tile.getRaster()->clear();
return;
}
const float evolution = (float)m_evolution->getValue(frame);
const float p_evolution = (float)m_persistance_evolution->getValue(frame);
double evolution = m_evolution->getValue(frame);
double p_evolution = m_persistance_evolution->getValue(frame);
float4 *out_host;
/* allocate host memory */
TRasterGR8P out_host_ras(sizeof(float4) * dimOut.lx, pnParams.drawLevel);
double4 *out_host;
// allocate buffer
TRasterGR8P out_host_ras(sizeof(double4) * dimOut.lx, pnParams.drawLevel);
out_host_ras->lock();
out_host = (float4 *)out_host_ras->getRawData();
out_host = (double4 *)out_host_ras->getRawData();
doCompute_CPU(tile, frame, settings, out_host, dimOut, pnParams);
/* convert the output to channel values and store them */
tile.getRaster()->clear();
TRaster32P outRas32 = (TRaster32P)tile.getRaster();
TRaster64P outRas64 = (TRaster64P)tile.getRaster();
@ -296,71 +306,67 @@ void Iwa_PNPerspectiveFx::doCompute(TTile &tile, double frame,
//------------------------------------------------------------
void Iwa_PNPerspectiveFx::doCompute_CPU(TTile &tile, double frame,
const TRenderSettings &settings,
float4 *out_host, TDimensionI &dimOut,
double4 *out_host, TDimensionI &dimOut,
PN_Params &pnParams) {
/* branch by render mode */
if (pnParams.renderMode == 0 || pnParams.renderMode == 1) {
if (pnParams.renderMode == PN_Params::Noise ||
pnParams.renderMode == PN_Params::Noise_NoResample) {
calcPerinNoise_CPU(out_host, dimOut, pnParams,
(bool)(pnParams.renderMode == 0));
} else if (pnParams.renderMode == 2 || pnParams.renderMode == 3 ||
pnParams.renderMode == 4) {
pnParams.renderMode == PN_Params::Noise);
} else if (pnParams.renderMode == PN_Params::WarpHV ||
pnParams.renderMode == PN_Params::Fresnel ||
pnParams.renderMode == PN_Params::WarpHV2) {
calcPNNormal_CPU(out_host, dimOut, pnParams);
if (pnParams.renderMode == 4) {
if (pnParams.renderMode == PN_Params::WarpHV2) {
calcPNNormal_CPU(out_host, dimOut, pnParams, true);
}
}
}
/*------------------------------------------------------------
CPU computation
------------------------------------------------------------*/
void Iwa_PNPerspectiveFx::calcPerinNoise_CPU(float4 *out_host,
//------------------------------------------------------------
// render for 2 Noise modes
void Iwa_PNPerspectiveFx::calcPerinNoise_CPU(double4 *out_host,
TDimensionI &dimOut, PN_Params &p,
bool doResample) {
int reso = (doResample) ? 10 : 1;
/* iterator that stores the result */
float4 *out_p = out_host;
/* for each pixel */
double4 *out_p = out_host;
// compute for each pixel
for (int yy = 0; yy < p.drawLevel; yy++) {
for (int xx = 0; xx < dimOut.lx; xx++, out_p++) {
float val_sum = 0.0f;
double val_sum = 0.0;
int count = 0;
/* for each resampling point */
// for each sampling point
for (int tt = 0; tt < reso; tt++) {
for (int ss = 0; ss < reso; ss++) {
float2 tmpPixPos = {
(float)xx - 0.5f + ((float)ss + 0.5f) / (float)reso,
(float)yy - 0.5f + ((float)tt + 0.5f) / (float)reso};
float2 screenPos = {
tmpPixPos.x * p.a11 + tmpPixPos.y * p.a12 + p.a13,
tmpPixPos.x * p.a21 + tmpPixPos.y * p.a22 + p.a23};
/* ② compute the coordinate on the Perlin Noise plane */
float2 noisePos;
TPointD tmpPixPos(
(double)xx - 0.5 + ((double)ss + 0.5) / (double)reso,
(double)yy - 0.5 + ((double)tt + 0.5) / (double)reso);
TPointD screenPos = p.aff * tmpPixPos;
// compute coordinate on the noise plane
TPointD noisePos;
noisePos.x = -(p.eyeLevel.y + p.fy_2) * (screenPos.x - p.eyeLevel.x) /
(screenPos.y - p.eyeLevel.y) +
p.eyeLevel.x;
noisePos.y =
(p.fy_2 + screenPos.y) * p.A / (p.eyeLevel.y - screenPos.y);
float tmpVal = 0.5f;
float currentSize = p.size;
float2 currentOffset = p.offset;
float currentIntensity = 1.0f;
// float2* basis_p = basis;
double tmpVal = 0.5;
double currentSize = p.size;
TPointD currentOffset = p.offset;
double currentIntensity = 1.0;
float currentEvolution = p.time;
double currentEvolution = p.time;
/* accumulate noise for each generation */
// sum noise values
for (int o = 0; o < p.octaves; o++) {
float2 currentNoisePos = {
(noisePos.x - currentOffset.x) / currentSize,
(noisePos.y - currentOffset.y) / currentSize};
TPointD currentNoisePos =
(noisePos - currentOffset) * (1.0 / currentSize);
if (p.noiseType == 0) {
if (p.noiseType == PN_Params::Perlin) {
tmpVal += currentIntensity *
Noise1234::noise(currentNoisePos.x, currentNoisePos.y,
currentEvolution) /
p.int_sum;
} else {
} else { // Simplex case
tmpVal +=
currentIntensity *
SimplexNoise::noise(currentNoisePos.x, currentNoisePos.y,
@ -379,94 +385,85 @@ void Iwa_PNPerspectiveFx::calcPerinNoise_CPU(float4 *out_host,
}
}
float val = val_sum / (float)count;
double val = val_sum / (double)count;
/* clamp */
val = (val < 0.0f) ? 0.0f : ((val > 1.0f) ? 1.0f : val);
// clamp
val = (val < 0.0) ? 0.0 : ((val > 1.0) ? 1.0 : val);
(*out_p).x = val;
(*out_p).y = val;
(*out_p).z = val;
(*out_p).w = (p.alp_rend_sw) ? val : 1.0f;
(*out_p).w = (p.alp_rend_sw) ? val : 1.0;
}
}
}
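Editor's note (a hedged sketch, not part of the commit): both render paths map an output screen position onto the noise plane with the same perspective formula, driven by eyeLevel, fy_2 (half the frame height) and A (the camera-to-horizon distance). Isolated as a hypothetical helper it reads roughly as follows.
// project a screen-space position onto the noise plane (sketch of the formula
// used in calcPerinNoise_CPU above and calcPNNormal_CPU below)
TPointD projectToNoisePlane(const TPointD &screenPos, const TPointD &eyeLevel,
                            double fy_2, double A) {
  TPointD noisePos;
  noisePos.x = -(eyeLevel.y + fy_2) * (screenPos.x - eyeLevel.x) /
                   (screenPos.y - eyeLevel.y) +
               eyeLevel.x;
  noisePos.y = (fy_2 + screenPos.y) * A / (eyeLevel.y - screenPos.y);
  return noisePos;
}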
/*------------------------------------------------------------
WarpHV mode / Fresnel reflection mode
------------------------------------------------------------*/
void Iwa_PNPerspectiveFx::calcPNNormal_CPU(float4 *out_host,
//------------------------------------------------------------
// render WarpHV / Fresnel modes
void Iwa_PNPerspectiveFx::calcPNNormal_CPU(double4 *out_host,
TDimensionI &dimOut, PN_Params &p,
bool isSubWave) {
/* iterator that stores the result */
float4 *out_p = out_host;
/* for each pixel */
double4 *out_p = out_host;
// compute for each pixel
for (int yy = 0; yy < p.drawLevel; yy++) {
for (int xx = 0; xx < dimOut.lx; xx++, out_p++) {
float2 screenPos = {(float)xx * p.a11 + (float)yy * p.a12 + p.a13,
(float)xx * p.a21 + (float)yy * p.a22 + p.a23};
/* ② compute the coordinate on the Perlin Noise plane */
float2 noisePos;
TPointD screenPos = p.aff * TPointD((double)xx, (double)yy);
// compute coordinate on the noise plane
TPointD noisePos;
noisePos.x = -(p.eyeLevel.y + p.fy_2) * (screenPos.x - p.eyeLevel.x) /
(screenPos.y - p.eyeLevel.y) +
p.eyeLevel.x;
noisePos.y = (p.fy_2 + screenPos.y) * p.A / (p.eyeLevel.y - screenPos.y);
float gradient[2]; /* 0: horizontal difference, 1: vertical difference */
double gradient[2]; // 0: horizontal, 1: vertical
double delta = 0.001;
float delta = 0.001f;
for (int hv = 0; hv < 2; hv++) {
// initialize gradient
gradient[hv] = 0.0;
/* for the horizontal and vertical differences */
for (int yokoTate = 0; yokoTate < 2; yokoTate++) {
/* initialize the gradient */
gradient[yokoTate] = 0.0f;
/* compute the offsets of the sampling positions */
float2 kinbouNoisePos[2] = {
float2{noisePos.x - ((yokoTate == 0) ? delta : 0.0f),
noisePos.y - ((yokoTate == 0) ? 0.0f : delta)},
float2{noisePos.x + ((yokoTate == 0) ? delta : 0.0f),
noisePos.y + ((yokoTate == 0) ? 0.0f : delta)}};
float currentSize = p.size;
float2 currentOffset = p.offset;
float currentIntensity = 1.0f;
// float2* basis_p = basis;
float currentEvolution = (isSubWave) ? p.time + 100.0f : p.time;
/* for each generation */
// compute offset of sampling position
TPointD neighborNoisePos[2] = {
TPointD(noisePos.x - ((hv == 0) ? delta : 0.0),
noisePos.y - ((hv == 0) ? 0.0 : delta)),
TPointD(noisePos.x + ((hv == 0) ? delta : 0.0),
noisePos.y + ((hv == 0) ? 0.0 : delta))};
double currentSize = p.size;
TPointD currentOffset = p.offset;
double currentIntensity = 1.0;
double currentEvolution = (isSubWave) ? p.time + 100.0 : p.time;
// for each generation
for (int o = 0; o < p.octaves; o++, currentSize *= p.p_size,
currentOffset.x *= p.p_offset, currentOffset.y *= p.p_offset,
currentIntensity *= p.p_intensity) {
/* compute the noise coordinates offset in the plus and minus directions */
float2 currentOffsetNoisePos[2];
// compute offset noise position
TPointD currentOffsetNoisePos[2];
for (int mp = 0; mp < 2; mp++)
currentOffsetNoisePos[mp] =
float2{(kinbouNoisePos[mp].x - currentOffset.x) / currentSize,
(kinbouNoisePos[mp].y - currentOffset.y) / currentSize};
(neighborNoisePos[mp] - currentOffset) * (1.0 / currentSize);
/* accumulate the noise differences */
float noiseDiff;
// accumulate the noise value differences
double noiseDiff;
// Perlin Noise
if (p.noiseType == 0) {
if (p.noiseType == PN_Params::Perlin) {
noiseDiff =
Noise1234::noise(currentOffsetNoisePos[1].x,
currentOffsetNoisePos[1].y, currentEvolution) -
Noise1234::noise(currentOffsetNoisePos[0].x,
currentOffsetNoisePos[0].y, currentEvolution);
} else {
/* check the cell indices */
/* first, for the two neighboring points */
CellIds kinbouIds[2] = {
} else { // Simplex
// compute cell indexes
CellIds neighborIds[2] = {
SimplexNoise::getCellIds(currentOffsetNoisePos[0].x,
currentOffsetNoisePos[0].y,
currentEvolution),
SimplexNoise::getCellIds(currentOffsetNoisePos[1].x,
currentOffsetNoisePos[1].y,
currentEvolution)};
/* if the points are in the same cell, simply compute the difference */
if (kinbouIds[0] == kinbouIds[1]) {
// simply compute difference if points are in the same cell
if (neighborIds[0] == neighborIds[1]) {
noiseDiff = SimplexNoise::noise(currentOffsetNoisePos[1].x,
currentOffsetNoisePos[1].y,
currentEvolution) -
@ -474,22 +471,21 @@ void Iwa_PNPerspectiveFx::calcPNNormal_CPU(float4 *out_host,
currentOffsetNoisePos[0].y,
currentEvolution);
}
/* if the points are in different cells, use the center position */
// use the center cell id if the points are in different cells
else {
float2 currentCenterNoisePos = {
(noisePos.x - currentOffset.x) / currentSize,
(noisePos.y - currentOffset.y) / currentSize};
TPointD currentCenterNoisePos =
(noisePos - currentOffset) * (1.0 / currentSize);
CellIds centerIds = SimplexNoise::getCellIds(
currentCenterNoisePos.x, currentCenterNoisePos.y,
currentEvolution);
if (kinbouIds[0] == centerIds) {
if (neighborIds[0] == centerIds) {
noiseDiff = SimplexNoise::noise(currentCenterNoisePos.x,
currentCenterNoisePos.y,
currentEvolution) -
SimplexNoise::noise(currentOffsetNoisePos[0].x,
currentOffsetNoisePos[0].y,
currentEvolution);
} else // if(kinbouIds[1] == centerIds)
} else // if(neighborIds[1] == centerIds)
{
noiseDiff = SimplexNoise::noise(currentOffsetNoisePos[1].x,
currentOffsetNoisePos[1].y,
@ -498,90 +494,84 @@ void Iwa_PNPerspectiveFx::calcPNNormal_CPU(float4 *out_host,
currentCenterNoisePos.y,
currentEvolution);
}
/* since the displacement from one end to the center is used, double it to
match the end-to-end displacement */
noiseDiff *= 2.0f;
// multiply the difference
// since the displacement from one end to the center is used, double it to match the end-to-end displacement
noiseDiff *= 2.0;
}
}
/* multiply the difference by the intensity and accumulate */
gradient[yokoTate] += currentIntensity * noiseDiff / p.int_sum;
// sum gradient
gradient[hv] += currentIntensity * noiseDiff / p.int_sum;
currentEvolution *= p.p_evolution;
}
}
/* compute the neighbor vectors in the X and Y directions */
float3 vec_x = {delta * 2, 0.0f, gradient[0] * p.waveHeight};
float3 vec_y = {0.0f, delta * 2, gradient[1] * p.waveHeight};
float3 normal = normalize(cross(vec_x, vec_y));
// compute neighbor vectors
double3 vec_x = {delta * 2, 0.0, gradient[0] * p.waveHeight};
double3 vec_y = {0.0, delta * 2, gradient[1] * p.waveHeight};
double3 normal = normalize(cross(vec_x, vec_y));
/* vector from the camera to the plane */
float3 cam_vec = {noisePos.x - p.cam_pos.x, noisePos.y - p.cam_pos.y,
// vector from the camera to the point on the noise plane
double3 cam_vec = {noisePos.x - p.cam_pos.x, noisePos.y - p.cam_pos.y,
-p.cam_pos.z};
cam_vec = normalize(cam_vec);
/* WarpHV reference-image mode */
if (p.renderMode == 2 || p.renderMode == 4) {
/* reflection vector off the plane */
float alpha = dot(normal, cam_vec);
float3 reflect_cam = {
2.0f * alpha * normal.x - cam_vec.x,
2.0f * alpha * normal.y - cam_vec.y,
2.0f * alpha * normal.z - cam_vec.z}; /* the length of this is 1 */
/* reflection vector when reflected off a perfectly horizontal plane */
float3 reflect_cam_mirror = {cam_vec.x, cam_vec.y, -cam_vec.z};
/* store the angular deviation */
/* from -PI/2 to PI/2 */
float angle_h = atanf(reflect_cam.x / reflect_cam.y) -
atanf(reflect_cam_mirror.x / reflect_cam_mirror.y);
float angle_v = atanf(reflect_cam.z / reflect_cam.y) -
atanf(reflect_cam_mirror.z / reflect_cam_mirror.y);
if (p.renderMode == PN_Params::WarpHV ||
p.renderMode == PN_Params::WarpHV2) {
// reflected vector from the plane
double alpha = dot(normal, cam_vec);
double3 reflect_cam = {
2.0 * alpha * normal.x - cam_vec.x,
2.0 * alpha * normal.y - cam_vec.y,
2.0 * alpha * normal.z -
cam_vec.z}; // the length of this vector should be 1
// reflection vector off a perfectly horizontal plane
double3 reflect_cam_mirror = {cam_vec.x, cam_vec.y, -cam_vec.z};
// compute the angle difference
// in the range from -PI/2 to PI/2
double angle_h = std::atan(reflect_cam.x / reflect_cam.y) -
std::atan(reflect_cam_mirror.x / reflect_cam_mirror.y);
double angle_v = std::atan(reflect_cam.z / reflect_cam.y) -
std::atan(reflect_cam_mirror.z / reflect_cam_mirror.y);
/* treat 30 degrees as the maximum */
angle_h = 0.5f + angle_h / 0.5236f;
angle_v = 0.5f - angle_v / 0.5236f;
// maximum 30 degrees
angle_h = 0.5 + angle_h / 0.5236f;
angle_v = 0.5 - angle_v / 0.5236f;
/* clamp */
angle_h = (angle_h < 0.0f) ? 0.0f : ((angle_h > 1.0f) ? 1.0f : angle_h);
angle_v = (angle_v < 0.0f) ? 0.0f : ((angle_v > 1.0f) ? 1.0f : angle_v);
// clamp
angle_h = (angle_h < 0.0) ? 0.0 : ((angle_h > 1.0) ? 1.0 : angle_h);
angle_v = (angle_v < 0.0) ? 0.0 : ((angle_v > 1.0) ? 1.0 : angle_v);
if (p.renderMode == 2) {
if (p.renderMode == PN_Params::WarpHV) {
(*out_p).x = angle_h;
(*out_p).y = angle_v;
(*out_p).z = 0.0f;
(*out_p).w = 1.0f;
} else // p.renderMode == 4
(*out_p).z = 0.0;
(*out_p).w = 1.0;
} else // WarpHV2 case
{
if (!isSubWave) {
(*out_p).y = angle_v;
(*out_p).z = 0.0f;
(*out_p).w = 1.0f;
(*out_p).z = 0.0;
(*out_p).w = 1.0;
} else
(*out_p).x = angle_v;
}
}
/* Fresnel reflection mode */
else if (p.renderMode == 3) {
cam_vec.x *= -1;
cam_vec.y *= -1;
cam_vec.z *= -1;
float diffuse_angle = acosf(dot(normal, cam_vec)) * 180.0f / 3.14159f;
float ref = 0.0f;
if (diffuse_angle >= 0.0f && diffuse_angle < 90.0f) {
int index = (int)diffuse_angle;
float ratio = diffuse_angle - (float)index;
float fresnel_ref =
fresnel[index] * (1.0f - ratio) + fresnel[index + 1] * ratio;
ref =
(fresnel_ref - p.base_fresnel_ref) / (1.0f - p.base_fresnel_ref);
} else if (diffuse_angle >= 90.0f)
ref = 1.0f;
// Fresnel reflection mode
else if (p.renderMode == PN_Params::Fresnel) {
cam_vec.x *= -1.0;
cam_vec.y *= -1.0;
cam_vec.z *= -1.0;
double diffuse_angle = std::acos(dot(normal, cam_vec)) * M_180_PI;
double fresnel_ref = getFresnel(diffuse_angle);
double ref = (fresnel_ref - p.base_fresnel_ref) /
(p.top_fresnel_ref - p.base_fresnel_ref);
/* clamp */
ref = (ref < 0.0f) ? 0.0f : ((ref > 1.0f) ? 1.0f : ref);
// clamp
ref = (ref < 0.0) ? 0.0 : ((ref > 1.0) ? 1.0 : ref);
(*out_p).x = ref;
(*out_p).y = ref;
(*out_p).z = ref;
(*out_p).w = (p.alp_rend_sw) ? ref : 1.0f;
(*out_p).w = (p.alp_rend_sw) ? ref : 1.0;
}
}
}
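Editor's note (a sketch under the same double3 and dot() definitions from the top of this file, not part of the commit): the reflect_cam computation in the WarpHV branch is the mirror-reflection formula r = 2 (n . v) n - v, which flips the component of v perpendicular to the unit normal n. The helper name reflectAboutNormal is hypothetical.
// mirror a vector v about the unit normal n: r = 2 (n . v) n - v
inline double3 reflectAboutNormal(double3 n, double3 v) {
  double a = dot(n, v);  // uses the dot() helper defined near the top of this file
  double3 r = {2.0 * a * n.x - v.x, 2.0 * a * n.y - v.y, 2.0 * a * n.z - v.z};
  return r;
}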

View file

@ -2,7 +2,7 @@
/*------------------------------------------------------------
Iwa_PNPerspectiveFx
Generates a Perlin Noise / Simplex Noise pattern
render perspective noise pattern.
------------------------------------------------------------*/
#ifndef IWA_PN_PERSPECTIVE_H
#define IWA_PN_PERSPECTIVE_H
@ -11,96 +11,85 @@
#include "stdfx.h"
#include "tparamset.h"
struct float2 {
float x, y;
struct double3 {
double x, y, z;
};
struct float3 {
float x, y, z;
};
struct float4 {
float x, y, z, w;
struct double4 {
double x, y, z, w;
};
/* parameters used in the computation */
// parameters
struct PN_Params {
int renderMode;
int noiseType; /* 0:Perlin, 1: Simplex */
float size; /* noise size of the first generation */
int octaves; /* generation count */
float2 offset; /* offset of the first generation */
float p_intensity; /* intensity ratio between generations */
float p_size; /* size ratio between generations */
float p_offset; /* offset ratio between generations */
float2 eyeLevel; /* eye level position */
int drawLevel; /* distance from the bottom of the render region; the scanline position at which drawing starts */
enum RenderMode {
Noise = 0,
Noise_NoResample,
WarpHV,
Fresnel,
WarpHV2
} renderMode;
enum NoiseType { Perlin = 0, Simplex } noiseType;
double size; // noise size of the first generation
int octaves; // generation count
TPointD offset; // offset of the first generation
double p_intensity; // intensity ratio between gen
double p_size; // size ratio between gen
double p_offset; // offset ratio between gen
TPointD eyeLevel;
int drawLevel; // distance from the bottom of the render region to the eye
// level (number of scanlines to draw)
bool alp_rend_sw;
float waveHeight; /* used only in the WarpHV / Fresnel reflection modes */
float fy_2;
float A;
float3 cam_pos;
float base_fresnel_ref; /* used only in the Fresnel reflection mode */
float int_sum;
float a11, a12, a13;
float a21, a22, a23;
float time;
float p_evolution;
double waveHeight; // used in the WarpHV and Fresnel modes
double fy_2;
double A;
double3 cam_pos;
double base_fresnel_ref; // used in the Fresnel mode
double top_fresnel_ref; // used in the Fresnel mode
double int_sum;
TAffine aff;
double time;
double p_evolution;
};
class Iwa_PNPerspectiveFx final : public TStandardZeraryFx {
FX_PLUGIN_DECLARATION(Iwa_PNPerspectiveFx)
TIntEnumParamP
m_renderMode; /* render mode:
Noise / Noise (no resampled) / Warp HV offset /
Fresnel reflectivity / Warp HV offset 2 */
TIntEnumParamP m_renderMode;
TIntEnumParamP m_noiseType;
TDoubleParamP m_size;
TDoubleParamP m_evolution;
TIntEnumParamP m_octaves;
TPointParamP m_offset;
TIntEnumParamP
m_noiseType; /* noise type:
Perlin Noise / Simplex Noise */
TDoubleParamP m_persistance_intensity;
TDoubleParamP m_persistance_size;
TDoubleParamP m_persistance_evolution;
TDoubleParamP m_persistance_offset;
TDoubleParamP m_size; /* base size */
TDoubleParamP m_evolution; /* evolution */
TIntEnumParamP m_octaves; /* generation count */
TPointParamP m_offset; /* noise offset */
TDoubleParamP m_fov; // vertical camera field of view, in degrees
TPointParamP m_eyeLevel; // vanishing point
TDoubleParamP m_persistance_intensity; /* amplitude ratio of the next generation */
TDoubleParamP m_persistance_size; /* wavelength ratio of the next generation */
TDoubleParamP m_persistance_evolution; /* evolution cycle ratio of the next generation */
TDoubleParamP m_persistance_offset; /* offset distance ratio of the next generation */
TBoolParamP m_alpha_rendering; // specify whether to render the noise pattern
// to the alpha channel as well
TDoubleParamP m_fov; /* camera field of view */
TPointParamP m_eyeLevel; /* vanishing point position */
TDoubleParamP m_waveHeight;
TBoolParamP
m_alpha_rendering; /* whether to also apply noise to the alpha channel */
TBoolParamP m_normalize_fresnel; // normalize fresnel reflectivity
TDoubleParamP m_normalize_margin;
TDoubleParamP m_waveHeight; /* wave height */
/* convert the output to channel values and store them */
template <typename RASTER, typename PIXEL>
void setOutputRaster(float4 *srcMem, const RASTER dstRas, TDimensionI dim,
void setOutputRaster(double4 *srcMem, const RASTER dstRas, TDimensionI dim,
int drawLevel, const bool alp_rend_sw);
/* obtain the Perlin Noise parameters */
void getPNParameters(TTile &tile, double frame,
const TRenderSettings &settings, PN_Params &params,
TDimensionI &dimOut);
/* CPU computation for the normal noise modes */
void calcPerinNoise_CPU(float4 *out_host, TDimensionI &dimOut, PN_Params &p,
// render for 2 Noise modes
void calcPerinNoise_CPU(double4 *out_host, TDimensionI &dimOut, PN_Params &p,
bool doResample);
/* WarpHV mode / Fresnel reflection mode */
void calcPNNormal_CPU(float4 *out_host, TDimensionI &dimOut, PN_Params &p,
// render for WarpHV / Fresnel modes
void calcPNNormal_CPU(double4 *out_host, TDimensionI &dimOut, PN_Params &p,
bool isSubWave = false);
public:
@ -115,7 +104,7 @@ public:
const TRenderSettings &rend_sets) override;
void doCompute_CPU(TTile &tile, double frame, const TRenderSettings &settings,
float4 *out_host, TDimensionI &dimOut,
double4 *out_host, TDimensionI &dimOut,
PN_Params &pnParams);
void getParamUIs(TParamUIConcept *&concepts, int &length) override;