13. Printing the Irradiance Map


In the previous section, convolving the HDR environment map produced the irradiance map, which represents the integral of the indirect diffuse light arriving from the surrounding environment.
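
For context, what the convolution stores for each output direction N is the cosine-weighted hemisphere average of the environment radiance. The fragment shader below approximates it with a fixed-step Riemann sum in spherical coordinates, which is exactly what the "irradiance = PI * irradiance * (1.0 / nrSamples)" line at the end implements:

$$
\text{irradiance}(N) \;\approx\; \frac{\pi}{n}\sum_{\phi \in [0,\,2\pi)}\;\sum_{\theta \in [0,\,\pi/2)} L_{env}(\phi,\theta)\,\cos\theta\,\sin\theta, \qquad n = \text{total sample count}
$$

Here cos θ is the diffuse cosine weight, and sin θ comes from the solid-angle element dω = sin θ dθ dφ, so samples near the pole are not over-counted.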

Now we dump it to disk, in the same way the HDR environment map was dumped earlier. Because the irradiance map averages the environment and loses most of the high-frequency detail, a much lower resolution of 32x32 is enough to store it.

The result of running the program:
[Screenshot: the rendered result]
The code is as follows:
#include <osg/TextureCubeMap>
#include <osg/TexGen>
#include <osg/TexEnvCombine>
#include <osgUtil/ReflectionMapGenerator>
#include <osgDB/ReadFile>
#include <osgViewer/Viewer>
#include <osg/NodeVisitor>
#include <osg/ShapeDrawable>
#include <osgGA/TrackballManipulator>
#include <osgDB/WriteFile>

static const char * vertexShader =
{
"in vec3 aPos;\n"
"varying vec3 WorldPos;\n"
"void main(void)\n"
"{\n"
"    WorldPos = aPos;\n"
"    gl_Position = ftransform();\n"
"}\n"
};
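
// Fragment shader: treats the interpolated position as the direction/normal N and
// convolves the environment cube map over the hemisphere around N with a fixed-step Riemann sum.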

static const char *psShader =
{
"varying vec3 WorldPos; "
"uniform samplerCube environmentMap; "
"const float PI = 3.14159265359; "
"void main() "
"{ "
" vec3 N = normalize(WorldPos); "
" vec3 irradiance = vec3(0.0); "
" vec3 up = vec3(0.0, 1.0, 0.0); "
" vec3 right = normalize(cross(up, N)); "
" up = normalize(cross(N, right)); "
" float sampleDelta = 0.025; "
" float nrSamples = 0.0; "
" for (float phi = 0.0; phi < 2.0 * PI; phi += sampleDelta) "
" { "
" for (float theta = 0.0; theta < 0.5 * PI; theta += sampleDelta) "
" { "
" vec3 tangentSample = vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta)); "
" vec3 sampleVec = tangentSample.x * right + tangentSample.y * up + tangentSample.z * N; "
" irradiance += texture(environmentMap, sampleVec).rgb * cos(theta) * sin(theta); "
" nrSamples++; "
" } "
" } "
" irradiance = PI * irradiance * (1.0 / float(nrSamples)); "
" gl_FragColor = vec4(irradiance, 1.0); "
"}"
};
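
// Visitor that copies each geometry's vertex array into vertex attribute slot 1,
// so the vertex shader's aPos (bound to location 1 below) receives the positions.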
class MyNodeVisitor : public osg::NodeVisitor
{
public:
MyNodeVisitor() : osg::NodeVisitor(osg::NodeVisitor::TRAVERSE_ALL_CHILDREN)
{

}
void apply(osg::Geode& geode)
{
	int count = geode.getNumDrawables();
	for (int i = 0; i < count; i++)
	{
		osg::ref_ptr<osg::Geometry> geometry = geode.getDrawable(i)->asGeometry();
		if (!geometry.valid())
		{
			continue;
		}
		osg::Array* vertexArray = geometry->getVertexArray();
		geometry->setVertexAttribArray(1, vertexArray);

	}
	traverse(geode);
}

};

osg::ref_ptr<osg::TextureCubeMap> getTextureCubeMap(osgViewer::Viewer& viewer)
{
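// Creates six PRE_RENDER slave cameras, one per cube-map face. Each renders the scene
// into a 32x32 face via FBO and also attaches an osg::Image so the result can be read
// back and written to disk in the main loop.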
unsigned int screenWidth, screenHeight;
osg::GraphicsContext::WindowingSystemInterface * wsInterface = osg::GraphicsContext::getWindowingSystemInterface();
wsInterface->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), screenWidth, screenHeight);

osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
traits->x = 0;
traits->y = 0;
traits->width = screenWidth;
traits->height = screenHeight;
traits->windowDecoration = false;
traits->doubleBuffer = true;
traits->sharedContext = 0;
traits->readDISPLAY();
traits->setUndefinedScreenDetailsToDefaultScreen();

osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());
if (!gc)
{
	osg::notify(osg::NOTICE) << "GraphicsWindow has not been created successfully." << std::endl;
	return NULL;
}

int textureWidth = 32;
int textureHeight = 32;

osg::ref_ptr<osg::TextureCubeMap> texture = new osg::TextureCubeMap;

texture->setTextureSize(textureWidth, textureHeight);
texture->setInternalFormat(GL_RGB);
texture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
texture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
texture->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
texture->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);
texture->setWrap(osg::Texture::WRAP_R, osg::Texture::CLAMP_TO_EDGE);

osg::Camera::RenderTargetImplementation renderTargetImplementation = osg::Camera::FRAME_BUFFER_OBJECT;
// front face
{

	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setName("Front face camera");
	camera->setGraphicsContext(gc.get());
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::POSITIVE_Y);

	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(0, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
}


// top face
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setName("Top face camera");
	camera->setGraphicsContext(gc.get());
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::POSITIVE_Z);
	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(1, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd::rotate(osg::inDegrees(-90.0f), 1.0, 0.0, 0.0));
}

// left face
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setName("Left face camera");
	camera->setGraphicsContext(gc.get());
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::NEGATIVE_X);
	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(2, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd::rotate(osg::inDegrees(-90.0f), 0.0, 1.0, 0.0) * osg::Matrixd::rotate(osg::inDegrees(-90.0f), 0.0, 0.0, 1.0));
}

// right face
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setName("Right face camera");
	camera->setGraphicsContext(gc.get());
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::POSITIVE_X);
	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(3, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd::rotate(osg::inDegrees(90.0f), 0.0, 1.0, 0.0) * osg::Matrixd::rotate(osg::inDegrees(90.0f), 0.0, 0.0, 1.0));

}

// bottom face
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setGraphicsContext(gc.get());
	camera->setName("Bottom face camera");
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::NEGATIVE_Z);
	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(4, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd::rotate(osg::inDegrees(90.0f), 1.0, 0.0, 0.0) * osg::Matrixd::rotate(osg::inDegrees(180.0f), 0.0, 0.0, 1.0));

}

// back face
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setName("Back face camera");
	camera->setGraphicsContext(gc.get());
	camera->setViewport(new osg::Viewport(0, 0, textureWidth, textureHeight));
	camera->setAllowEventFocus(false);
	camera->setRenderTargetImplementation(renderTargetImplementation);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	// attach this cube-map face as the camera's color render target
	camera->attach(osg::Camera::COLOR_BUFFER, texture, 0, osg::TextureCubeMap::NEGATIVE_Y);
	osg::ref_ptr<osg::Image> printImage = new osg::Image;
	printImage->setFileName(camera->getName());
	printImage->allocateImage(textureWidth, textureHeight, 1, GL_RGBA, GL_UNSIGNED_BYTE);
	texture->setImage(5, printImage);
	camera->attach(osg::Camera::COLOR_BUFFER, printImage);
	viewer.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd::rotate(osg::inDegrees(180.0f), 1.0, 0.0, 0.0));

}

viewer.getCamera()->setProjectionMatrixAsPerspective(90.0f, 1.0, 0.1, 10);
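// The 90-degree FOV with aspect ratio 1.0 above makes each slave camera's frustum cover exactly one cube face.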



//viewer.getCamera()->setNearFarRatio(0.0001f);
return texture;

}

int main()
{
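// Build the source environment cube map from six face images on disk
// (presumably the faces dumped in the earlier HDR environment map section;
// the D:/delete/... paths are the author's local files).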
osg::ref_ptr<osg::TextureCubeMap> tcm = new osg::TextureCubeMap;
tcm->setTextureSize(512, 512);
tcm->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
tcm->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
tcm->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
tcm->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);
tcm->setWrap(osg::Texture::WRAP_R, osg::Texture::CLAMP_TO_EDGE);

std::string strImagePosX = "D:/delete/Right face camera.bmp";
osg::ref_ptr<osg::Image> imagePosX = osgDB::readImageFile(strImagePosX);
tcm->setImage(osg::TextureCubeMap::POSITIVE_X, imagePosX);
std::string strImageNegX = "D:/delete/Left face camera.bmp";
osg::ref_ptr<osg::Image> imageNegX = osgDB::readImageFile(strImageNegX);
tcm->setImage(osg::TextureCubeMap::NEGATIVE_X, imageNegX);

std::string strImagePosY = "D:/delete/Front face camera.bmp";
osg::ref_ptr<osg::Image> imagePosY = osgDB::readImageFile(strImagePosY);
tcm->setImage(osg::TextureCubeMap::POSITIVE_Y, imagePosY);
std::string strImageNegY = "D:/delete/Back face camera.bmp";
osg::ref_ptr<osg::Image> imageNegY = osgDB::readImageFile(strImageNegY);
tcm->setImage(osg::TextureCubeMap::NEGATIVE_Y, imageNegY);

std::string strImagePosZ = "D:/delete/Top face camera.bmp";
osg::ref_ptr<osg::Image> imagePosZ = osgDB::readImageFile(strImagePosZ);
tcm->setImage(osg::TextureCubeMap::POSITIVE_Z, imagePosZ);
std::string strImageNegZ = "D:/delete/Bottom face camera.bmp";
osg::ref_ptr<osg::Image> imageNegZ = osgDB::readImageFile(strImageNegZ);
tcm->setImage(osg::TextureCubeMap::NEGATIVE_Z, imageNegZ);

osg::ref_ptr<osg::Box> box = new osg::Box(osg::Vec3(0, 0, 0), 1);
osg::ref_ptr<osg::ShapeDrawable> drawable = new osg::ShapeDrawable(box);
osg::ref_ptr<osg::Geode> geode = new osg::Geode;
geode->addDrawable(drawable);
MyNodeVisitor nv;
geode->accept(nv);
osg::ref_ptr<osg::StateSet> stateset = geode->getOrCreateStateSet();
stateset->setTextureAttributeAndModes(0, tcm, osg::StateAttribute::OVERRIDE | osg::StateAttribute::ON);

//shader

osg::ref_ptr<osg::Shader> vs1 = new osg::Shader(osg::Shader::VERTEX, vertexShader);
osg::ref_ptr<osg::Shader> ps1 = new osg::Shader(osg::Shader::FRAGMENT, psShader);
osg::ref_ptr<osg::Program> program1 = new osg::Program;
program1->addShader(vs1);
program1->addShader(ps1);
program1->addBindAttribLocation("aPos", 1);
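// binds "aPos" to attribute location 1, the same slot MyNodeVisitor filled with the vertex positions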

osg::ref_ptr<osg::Uniform> tex0Uniform = new osg::Uniform("environmentMap", 0);
stateset->addUniform(tex0Uniform);
stateset->setAttribute(program1, osg::StateAttribute::ON);


osgViewer::Viewer viewer;
osg::ref_ptr<osgGA::TrackballManipulator> manipulator = new osgGA::TrackballManipulator();
viewer.setCameraManipulator(manipulator);
osg::Vec3d newEye(0, 0, 0);
osg::Vec3 newCenter(0, 0, 0);
osg::Vec3 newUp(0, 1, 0);
manipulator->setHomePosition(newEye, newCenter, newUp);
osg::ref_ptr<osg::TextureCubeMap> textureCubeMap = getTextureCubeMap(viewer);
viewer.setSceneData(geode.get());

bool bPrinted = false;
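// Render frames normally; after the first frame, write each attached face image to disk once.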
while (!viewer.done())
{
	viewer.frame();
	if (!bPrinted)
	{
		bPrinted = true;
		int imageNumber = textureCubeMap->getNumImages();
		for (int i = 0; i < imageNumber; i++)
		{

			osg::ref_ptr<osg::Image> theImage = textureCubeMap->getImage(i);
			std::string strPrintName = "E:/irradiance/" + theImage->getFileName() + ".bmp";
			osgDB::writeImageFile(*theImage, strPrintName);
		}
	}
}
return 0;

}
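
For completeness, this is how the precomputed irradiance map is typically consumed later in the diffuse IBL term. The sketch below uses assumed names (irradianceMap, WorldNormal, albedo) that are not part of this section's code; thanks to the PI / nrSamples scaling in the convolution shader, no extra division by PI is needed in the diffuse term:

// Minimal fragment-shader sketch (assumed names) for the indirect diffuse term.
static const char * diffuseIblShader =
{
"uniform samplerCube irradianceMap;\n"   // cube map assembled from the dumped 32x32 faces
"varying vec3 WorldNormal;\n"            // assumed: surface normal passed from the vertex shader
"uniform vec3 albedo;\n"                 // assumed: material base color
"void main()\n"
"{\n"
"    vec3 N = normalize(WorldNormal);\n"
"    vec3 irradiance = texture(irradianceMap, N).rgb;\n"  // precomputed hemisphere integral
"    vec3 diffuse = albedo * irradiance;\n"                // indirect diffuse lighting
"    gl_FragColor = vec4(diffuse, 1.0);\n"
"}\n"
};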
