This article collects typical usage examples of the C++ method ShadingPoint::get_point. If you have been wondering how exactly ShadingPoint::get_point is used in practice, the hand-picked code examples below may help. You can also explore further usage examples of the enclosing class, ShadingPoint.
The following shows 5 code examples of ShadingPoint::get_point, sorted by popularity by default.
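Before the full examples, here is a minimal sketch of the pattern the snippets below share: trace a ShadingRay with appleseed's Intersector and, once the hit has been recorded in a ShadingPoint, read the world-space intersection position with get_point(). The helper function and its exact signature are illustrative assumptions, not appleseed API.

// Minimal sketch (hypothetical helper): trace a ray and, on a hit,
// read the world-space hit position via ShadingPoint::get_point().
bool query_hit_position(
    const Intersector& intersector,
    const ShadingRay& ray,
    Vector3d& hit_position)
{
    ShadingPoint shading_point;
    if (!intersector.trace(ray, shading_point, 0))  // no parent shading point
        return false;

    // get_point() returns the intersection point in world space.
    hit_position = shading_point.get_point();
    return true;
}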
Example 1: assert
DirectLightingIntegrator::DirectLightingIntegrator(
    const ShadingContext& shading_context,
    const LightSampler& light_sampler,
    const ShadingPoint& shading_point,
    const Vector3d& outgoing,
    const BSDF& bsdf,
    const void* bsdf_data,
    const int bsdf_sampling_modes,
    const int light_sampling_modes,
    const size_t bsdf_sample_count,
    const size_t light_sample_count,
    const bool indirect)
  : m_shading_context(shading_context)
  , m_light_sampler(light_sampler)
  , m_shading_point(shading_point)
  , m_point(shading_point.get_point())
  , m_geometric_normal(shading_point.get_geometric_normal())
  , m_shading_basis(shading_point.get_shading_basis())
  , m_time(shading_point.get_time())
  , m_outgoing(outgoing)
  , m_bsdf(bsdf)
  , m_bsdf_data(bsdf_data)
  , m_bsdf_sampling_modes(bsdf_sampling_modes)
  , m_light_sampling_modes(light_sampling_modes)
  , m_bsdf_sample_count(bsdf_sample_count)
  , m_light_sample_count(light_sample_count)
  , m_indirect(indirect)
{
    assert(is_normalized(outgoing));
}
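Example 1 does little more than cache what it will need later: the world-space point from get_point(), the geometric normal, the shading basis and the time are copied into members once, so the integrator never has to go back to the ShadingPoint during sampling. A stripped-down, hypothetical sketch of the same caching pattern (the struct and its name are illustrative, not appleseed API):

// Hypothetical illustration of the caching pattern used by Example 1.
struct LocalShadingFrame
{
    Vector3d m_point;             // world-space shading point
    Vector3d m_geometric_normal;  // world-space geometric normal
    Basis3d  m_shading_basis;     // local shading frame

    explicit LocalShadingFrame(const ShadingPoint& shading_point)
      : m_point(shading_point.get_point())
      , m_geometric_normal(shading_point.get_geometric_normal())
      , m_shading_basis(shading_point.get_shading_basis())
    {
    }
};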
Example 2: add_back_lighting
void add_back_lighting(
    const InputValues& values,
    SamplingContext& sampling_context,
    const PixelContext& pixel_context,
    const ShadingContext& shading_context,
    const ShadingPoint& shading_point,
    Spectrum& radiance,
    SpectrumStack& aovs) const
{
    const Vector3d& p = shading_point.get_point();
    const Vector3d& n = shading_point.get_original_shading_normal();
    const Vector3d& d = shading_point.get_ray().m_dir;

    // Construct a ray perpendicular to the other side of the surface.
    ShadingRay back_ray(shading_point.get_ray());
    back_ray.m_tmax *= norm(d);
    back_ray.m_dir = dot(d, n) > 0.0 ? -n : n;
    back_ray.m_org = p - back_ray.m_tmax * back_ray.m_dir;

    ShadingPoint back_shading_point(shading_point);
    back_shading_point.set_ray(back_ray);

    Spectrum back_radiance(0.0f);
    SpectrumStack back_aovs(aovs.size(), 0.0f);

    // Compute back lighting.
    for (size_t i = 0; i < m_back_lighting_samples; ++i)
    {
        shading_context.get_lighting_engine()->compute_lighting(
            sampling_context,
            pixel_context,
            shading_context,
            back_shading_point,
            back_radiance,
            back_aovs);
    }

    // Apply translucency factor.
    back_radiance *= values.m_translucency;
    back_aovs *= values.m_translucency;

    // Divide by the number of samples.
    const float rcp_sample_count = 1.0f / static_cast<float>(m_back_lighting_samples);
    back_radiance *= rcp_sample_count;
    back_aovs *= rcp_sample_count;

    // Add back lighting contribution.
    radiance += back_radiance;
    aovs += back_aovs;
}
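A note on the geometry of Example 2: the original ray direction d is not necessarily unit length, while the new direction is the unit normal n (or -n), so m_tmax is first rescaled by norm(d) to preserve the ray's world-space length. The sign of the new direction is chosen so that the origin p - m_tmax * m_dir lands on the far side of the surface relative to the incoming ray, and since m_org + m_tmax * m_dir = p, the back ray terminates exactly at the point returned by get_point() before the lighting engine is re-run from there.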
Example 3: sample
size_t SubsurfaceSampler::sample(
    SamplingContext& sampling_context,
    const ShadingPoint& outgoing_point,
    const BSSRDF& bssrdf,
    const void* bssrdf_data,
    SubsurfaceSample samples[],
    const size_t max_sample_count)
{
    assert(max_sample_count > 0);

    // Sample the diffusion profile.
    BSSRDFSample bssrdf_sample(sampling_context);
    if (!bssrdf.sample(bssrdf_data, bssrdf_sample))
        return 0;

    // Reject points too far away.
    // This introduces negligible bias in comparison to the other approximations.
    const Vector2d& point(bssrdf_sample.get_point());
    const double radius2 = square_norm(point);
    const double rmax2 = bssrdf_sample.get_rmax2();
    if (radius2 > rmax2)
        return 0;

    // Evaluate the PDF of the diffusion profile.
    const double radius = sqrt(radius2);
    const double bssrdf_sample_pdf =
        bssrdf.evaluate_pdf(bssrdf_data, bssrdf_sample.get_channel(), radius);

    // Pick a sampling basis.
    sampling_context.split_in_place(1, 1);
    Axis sampling_axis;
    Basis3d sampling_basis;
    double sampling_basis_pdf;
    pick_sampling_basis(
        outgoing_point.get_shading_basis(),
        sampling_context.next_double2(),
        sampling_axis,
        sampling_basis,
        sampling_basis_pdf);

    // Compute height of sample point on (positive) hemisphere of radius Rmax.
    assert(rmax2 >= radius2);
    const double h = sqrt(rmax2 - radius2);

    // Compute sphere entry and exit points.
    Vector3d entry_point, exit_point;
    entry_point = exit_point = outgoing_point.get_point();
    entry_point += sampling_basis.transform_to_parent(Vector3d(point[0], +h, point[1]));
    exit_point += sampling_basis.transform_to_parent(Vector3d(point[0], -h, point[1]));
    assert(feq(norm(exit_point - entry_point), 2.0 * h, 1.0e-9));

    // Build a probe ray inscribed inside the sphere of radius Rmax.
    ShadingRay probe_ray(
        entry_point,
        -sampling_basis.get_normal(),
        0.0,
        2.0 * h,
        outgoing_point.get_time(),
        VisibilityFlags::ProbeRay,
        outgoing_point.get_ray().m_depth + 1);

    const Material* material = outgoing_point.get_material();
    ShadingPoint shading_points[2];
    size_t shading_point_index = 0;
    ShadingPoint* parent_shading_point = 0;
    size_t sample_count = 0;

    // Trace the ray and return all intersections (or up to max_sample_count of them) found inside the sphere.
    while (true)
    {
        // Continue tracing the ray.
        shading_points[shading_point_index].clear();
        if (!m_shading_context.get_intersector().trace(
                probe_ray,
                shading_points[shading_point_index],
                parent_shading_point))
            break;

        // Only consider points lying on surfaces with the same material as the outgoing point.
        if (shading_points[shading_point_index].get_material() == material)
        {
            // Execute the OSL shader if we have one. Needed for bump mapping.
#ifdef APPLESEED_WITH_OSL
            if (material->has_osl_surface())
            {
                sampling_context.split_in_place(1, 1);
                m_shading_context.execute_osl_bump(
                    *material->get_osl_surface(),
                    shading_points[shading_point_index],
                    sampling_context.next_double2());
            }
#endif

            SubsurfaceSample& sample = samples[sample_count++];
            sample.m_point = shading_points[shading_point_index];

            // Compute sample probability.
            sample.m_probability =
                bssrdf_sample_pdf
                * sampling_basis_pdf
//......... part of the code is omitted here .........
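Worth spelling out in Example 3: both the entry and exit points are offsets of outgoing_point.get_point(). Writing P for that point, r^2 for the squared norm of the sampled disk coordinates and h = sqrt(Rmax^2 - r^2), each of the two points lies at squared distance r^2 + h^2 = Rmax^2 from P, so the probe ray of length 2h is a chord of the sphere of radius Rmax centered on the shading point, which is what the "probe ray inscribed inside the sphere" comment refers to.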
Example 4: evaluate
//......... part of the code is omitted here .........
            // Execute the OSL shader if there is one.
            if (material_data.m_shader_group)
            {
                sampling_context.split_in_place(2, 1);
                shading_context.execute_osl_bump(
                    *material_data.m_shader_group,
                    shading_point,
                    sampling_context.next_vector2<2>());
            }
        }
#endif

        const Vector3d v =
            m_shading_mode == ShadingNormal ? shading_point.get_shading_basis().get_normal() :
            m_shading_mode == Tangent ? shading_point.get_shading_basis().get_tangent_u() :
            shading_point.get_shading_basis().get_tangent_v();
        shading_result.set_main_to_linear_rgb(vector3_to_color(v));
    }
    break;

case GeometricNormal:
    shading_result.set_main_to_linear_rgb(
        vector3_to_color(shading_point.get_geometric_normal()));
    break;

case OriginalShadingNormal:
    shading_result.set_main_to_linear_rgb(
        vector3_to_color(shading_point.get_original_shading_normal()));
    break;

case WorldSpacePosition:
    {
        const Vector3d& p = shading_point.get_point();
        shading_result.set_main_to_linear_rgb(
            Color3f(Color3d(p.x, p.y, p.z)));
    }
    break;

case Sides:
    shading_result.set_main_to_linear_rgb(
        shading_point.get_side() == ObjectInstance::FrontSide
            ? Color3f(0.0f, 0.0f, 1.0f)
            : Color3f(1.0f, 0.0f, 0.0f));
    break;

case Depth:
    shading_result.set_main_to_linear_rgb(
        Color3f(static_cast<float>(shading_point.get_distance())));
    break;

case ScreenSpaceWireframe:
    {
        // Initialize the shading result to the background color.
        shading_result.set_main_to_linear_rgba(Color4f(0.0f, 0.0f, 0.8f, 0.5f));

        if (shading_point.is_triangle_primitive())
        {
            // Film space thickness of the wires.
            const double SquareWireThickness = square(0.00025);

            // Retrieve the time, the scene and the camera.
            const double time = shading_point.get_time().m_absolute;
            const Scene& scene = shading_point.get_scene();
            const Camera& camera = *scene.get_camera();
//......... part of the code is omitted here .........
Example 5: do_trace_same_material
bool Intersector::do_trace_same_material(
    const ShadingRay& ray,
    const ShadingPoint& parent_shading_point,
    const bool offset_origin,
    ShadingPoint& shading_point) const
{
    ShadingRay up_ray(ray);
    ShadingRay down_ray(ray);
    down_ray.m_dir = -down_ray.m_dir;

    if (offset_origin)
    {
        parent_shading_point.refine_and_offset();
        const Vector3d offset =
            parent_shading_point.get_offset_point(down_ray.m_dir) - parent_shading_point.get_point();
        up_ray.m_org += offset;
    }

    const Material* parent_material = parent_shading_point.get_material();

    // Trace the ray.
    ShadingPoint up_shading_point;
    trace_back_sides(
        up_ray,
        up_shading_point);

    // Discard objects with different materials.
    if (up_shading_point.hit())
    {
        if (up_shading_point.get_opposite_material() != parent_material)
            up_shading_point.clear();
    }

    // Trace the opposite ray.
    ShadingPoint down_shading_point;
    trace_back_sides(
        down_ray,
        down_shading_point);

    // Discard objects with different materials.
    if (down_shading_point.hit())
    {
        if (down_shading_point.get_opposite_material() != parent_material)
            down_shading_point.clear();
    }

    // Keep the nearest hit, if any.
    if (up_shading_point.hit() && down_shading_point.hit())
    {
        shading_point =
            up_shading_point.get_distance() < down_shading_point.get_distance() ? up_shading_point : down_shading_point;
        return true;
    }
    else if (up_shading_point.hit())
    {
        shading_point = up_shading_point;
        return true;
    }
    else if (down_shading_point.hit())
    {
        shading_point = down_shading_point;
        return true;
    }

    return false;
}
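In Example 5, get_point() serves as the reference position: the difference between get_offset_point(down_ray.m_dir) and get_point() is the origin offset prepared by refine_and_offset(), and it is added to up_ray.m_org so that the upward probe does not start exactly on the parent surface.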