Following Cherno's game engine from scratch [22]: CameraController and Resize


Prerequisites:

YOTO.h: 

#pragma once

//For use by the YOTO client app

#include "YOTO/Application.h"
#include"YOTO/Layer.h"
#include "YOTO/Log.h"

#include"YOTO/Core/Timestep.h"

#include"YOTO/Input.h"
#include"YOTO/KeyCode.h"
#include"YOTO/MouseButtonCodes.h"
#include "YOTO/OrthographicCameraController.h"

#include"YOTO/ImGui/ImGuiLayer.h"

//Renderer
#include"YOTO/Renderer/Renderer.h"
#include"YOTO/Renderer/RenderCommand.h"

#include"YOTO/Renderer/Buffer.h"
#include"YOTO/Renderer/Shader.h"
#include"YOTO/Renderer/Texture.h"
#include"YOTO/Renderer/VertexArray.h"

#include"YOTO/Renderer/OrthographicCamera.h"



//Entry point
#include"YOTO/EntryPoint.h"

Add the new method:

OrthographicCamera.h:

#pragma once
#include <glm/glm.hpp>
namespace YOTO {
	class OrthographicCamera
	{
	public:
		OrthographicCamera(float left, float right, float bottom, float top);

		void SetProjection(float left, float right, float bottom, float top);

		const glm::vec3& GetPosition()const { return m_Position; }
		void SetPosition(const glm::vec3& position) {
			m_Position = position;
			RecalculateViewMatrix();
		}

		float GetRotation()const { return m_Rotation; }
		void SetRotation(float rotation) {
			m_Rotation = rotation;
			RecalculateViewMatrix();
		}

		const glm::mat4& GetProjectionMatrix()const { return m_ProjectionMatrix; }
		const glm::mat4& GetViewMatrix()const { return m_ViewMatrix; }
		const glm::mat4& GetViewProjectionMatrix()const { return m_ViewProjectionMatrix; }
	private:
		void RecalculateViewMatrix();
	private:
		glm::mat4 m_ProjectionMatrix;
		glm::mat4 m_ViewMatrix;
		glm::mat4 m_ViewProjectionMatrix;
		glm::vec3 m_Position = { 0.0f ,0.0f ,0.0f };
		float m_Rotation = 0.0f;

	};

}

OrthographicCamera.cpp:

#include "ytpch.h"
#include "OrthographicCamera.h"
#include <glm/gtc/matrix_transform.hpp>
namespace YOTO {
	OrthographicCamera::OrthographicCamera(float left, float right, float bottom, float top)
		:m_ProjectionMatrix(glm::ortho(left, right, bottom, top)), m_ViewMatrix(1.0f)
	{
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}

	void OrthographicCamera::SetProjection(float left, float right, float bottom, float top) {
		m_ProjectionMatrix = glm::ortho(left, right, bottom, top);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
	void OrthographicCamera::RecalculateViewMatrix()
	{
		glm::mat4 transform = glm::translate(glm::mat4(1.0f), m_Position) *
			glm::rotate(glm::mat4(1.0f), glm::radians(m_Rotation), glm::vec3(0, 0, 1));
		m_ViewMatrix = glm::inverse(transform);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
}
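A quick sanity check of the math above: RecalculateViewMatrix builds the camera's world transform and inverts it, so a point sitting exactly where the camera is should land at the center of clip space, while the projection simply maps the [-aspect*zoom, aspect*zoom] x [-zoom, zoom] box to the screen. The snippet below is a standalone sketch using only glm, not engine code (the 1280x720 numbers just mirror the sandbox setup):

// Standalone sketch (not engine code): verify the orthographic view-projection math.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>

int main() {
	float aspect = 1280.0f / 720.0f, zoom = 1.0f;
	glm::mat4 projection = glm::ortho(-aspect * zoom, aspect * zoom, -zoom, zoom);

	// The view matrix is the inverse of the camera's transform, exactly as
	// OrthographicCamera::RecalculateViewMatrix does; here the camera sits at x = +0.5.
	glm::mat4 view = glm::inverse(glm::translate(glm::mat4(1.0f), glm::vec3(0.5f, 0.0f, 0.0f)));
	glm::mat4 viewProjection = projection * view;

	// A point at the camera's position projects to the center of the screen.
	glm::vec4 clip = viewProjection * glm::vec4(0.5f, 0.0f, 0.0f, 1.0f);
	std::cout << clip.x << ", " << clip.y << std::endl; // prints 0, 0
	return 0;
}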

Wrap the camera in a controller class:

OrthographicCameraController.h: 

#pragma once
#include "YOTO/Renderer/OrthographicCamera.h"
#include"YOTO/Core/Timestep.h"
#include "YOTO/Event/ApplicationEvent.h"
#include "YOTO/Event/MouseEvent.h"
namespace YOTO {
	class OrthographicCameraController {
	public:
		OrthographicCameraController(float aspectRatio, bool rotation = false); // aspectRatio = width / height, e.g. 1280.0f / 720.0f
		void OnUpdate(Timestep ts);
		void OnEvent(Event& e);
		OrthographicCamera& GetCamera() {
			return m_Camera;
		}
		const OrthographicCamera& GetCamera()const {
			return m_Camera;
		}

	private:
		bool OnMouseScrolled(MouseScrolledEvent &e);
		bool OnWindowResized(WindowResizeEvent& e);
	private:

		float m_AspectRatio; // aspect ratio (width / height)
		float m_ZoomLevel = 1.0f;
		OrthographicCamera m_Camera;

		bool m_Rotation;

		glm::vec3 m_CameraPosition = {0.0f,0.0f,0.0f};
		float m_CameraRotation = 0.0f;
		float m_CameraTranslationSpeed = 1.0f, m_CameraRotationSpeed = 180.0f;
	};
}

 OrthographicCameraController.cpp: 

#include "ytpch.h"
#include "OrthographicCameraController.h"
#include"YOTO/Input.h"
#include <YOTO/KeyCode.h>
namespace YOTO {

	OrthographicCameraController::OrthographicCameraController(float aspectRatio, bool rotation)
		:m_AspectRatio(aspectRatio),m_Camera(-m_AspectRatio*m_ZoomLevel,m_AspectRatio*m_ZoomLevel,-m_ZoomLevel,m_ZoomLevel),m_Rotation(rotation)
	{

	}

	 
	void OrthographicCameraController::OnUpdate(Timestep ts)
	{
		if (Input::IsKeyPressed(YT_KEY_A)) {
			m_CameraPosition.x -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_D)) {
			m_CameraPosition.x += m_CameraTranslationSpeed * ts;
		}
		if (Input::IsKeyPressed(YT_KEY_S)) {
			m_CameraPosition.y -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_W)) {
			m_CameraPosition.y += m_CameraTranslationSpeed * ts;
		}
		if (m_Rotation) {
			if (Input::IsKeyPressed(YT_KEY_Q)) {
				m_CameraRotation += m_CameraRotationSpeed * ts;
			}
			else if (Input::IsKeyPressed(YT_KEY_E)) {
				m_CameraRotation -= m_CameraRotationSpeed * ts;
			}
			m_Camera.SetRotation(m_CameraRotation);
		}
		m_Camera.SetPosition(m_CameraPosition);
		m_CameraTranslationSpeed = m_ZoomLevel;
	}

	
	void OrthographicCameraController::OnEvent(Event& e)
	{
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<MouseScrolledEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnMouseScrolled));
		dispatcher.Dispatch<WindowResizeEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnWindowResized));
	}
	bool OrthographicCameraController::OnMouseScrolled(MouseScrolledEvent& e)
	{
		m_ZoomLevel -= e.GetYOffset()*0.5f;
		m_ZoomLevel = std::max(m_ZoomLevel, 0.25f);
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
	bool OrthographicCameraController::OnWindowResized(WindowResizeEvent& e)
	{
		m_AspectRatio = (float)e.GetWidth()/(float) e.GetHeight();
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
}
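One detail worth calling out in OnUpdate: m_CameraTranslationSpeed is reset to m_ZoomLevel every frame, so panning always covers the same fraction of the visible area no matter how far in or out you have zoomed. A tiny standalone check of that claim (illustrative only, not engine code):

// Illustrative only: the visible vertical extent is 2 * zoom (top - bottom),
// and the pan speed equals zoom, so one second of input always pans half a screen height.
#include <initializer_list>
#include <iostream>

int main() {
	for (float zoom : { 0.25f, 1.0f, 4.0f }) {
		float visibleHeight = 2.0f * zoom;    // from SetProjection(..., -zoom, zoom)
		float panPerSecond = zoom;            // m_CameraTranslationSpeed = m_ZoomLevel
		std::cout << "zoom " << zoom << ": pans "
			<< panPerSecond / visibleHeight << " of the screen height per second\n";
	}
	return 0;
}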

SandboxApp.cpp:

#include<YOTO.h>
#include "imgui/imgui.h"
#include<stdio.h>
#include <glm/gtc/matrix_transform.hpp>
#include <Platform/OpenGL/OpenGLShader.h>
#include <glm/gtc/type_ptr.hpp>


class ExampleLayer:public YOTO::Layer
{
public:
	ExampleLayer()
	:Layer("Example"),  m_CameraController(1280.0f/720.0f,true){
		uint32_t indices[3] = { 0,1,2 };
		float vertices[3 * 7] = {
			-0.5f,-0.5f,0.0f, 0.8f,0.2f,0.8f,1.0f,
			0.5f,-0.5f,0.0f,  0.2f,0.3f,0.8f,1.0f,
			0.0f,0.5f,0.0f,   0.8f,0.8f,0.2f,1.0f,
		};

		m_VertexArray.reset(YOTO::VertexArray::Create());


		YOTO::Ref<YOTO::VertexBuffer> m_VertexBuffer;
		m_VertexBuffer.reset(YOTO::VertexBuffer::Create(vertices, sizeof(vertices)));

		{
			YOTO::BufferLayout setlayout = {

	{YOTO::ShaderDataType::Float3,"a_Position"},
		{YOTO::ShaderDataType::Float4,"a_Color"}
			};
			m_VertexBuffer->SetLayout(setlayout);

		}

		m_VertexArray->AddVertexBuffer(m_VertexBuffer);


		YOTO::Ref<YOTO::IndexBuffer>m_IndexBuffer;
		m_IndexBuffer.reset(YOTO::IndexBuffer::Create(indices, sizeof(indices) / sizeof(uint32_t)));

		m_VertexArray->AddIndexBuffer(m_IndexBuffer);

		std::string vertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		layout(location = 1) in vec4 a_Color;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;
		out vec3 v_Position;
		out vec4 v_Color;
		void main(){
		v_Position=a_Position;
		v_Color=a_Color;
		gl_Position =u_ViewProjection *u_Transform* vec4( a_Position,1.0);
		}
		)";
		//Fragment shader: outputs the interpolated vertex color
		std::string fragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;
		in vec3 v_Position;
		in vec4 v_Color;
		void main(){
		color=vec4(v_Color);
		}
		)";
		m_Shader=(YOTO::Shader::Create("VertexPosColor", vertexSource, fragmentSource));

		///Square test///

		m_SquareVA.reset(YOTO::VertexArray::Create());

		float squareVertices[5 * 4] = {
			-0.5f,-0.5f,0.0f, 0.0f,0.0f,
			0.5f,-0.5f,0.0f,  1.0f,0.0f,
			0.5f,0.5f,0.0f,   1.0f,1.0f,
			-0.5f,0.5f,0.0f,  0.0f,1.0f,
		};
		YOTO::Ref<YOTO::VertexBuffer> squareVB;
		squareVB.reset(YOTO::VertexBuffer::Create(squareVertices, sizeof(squareVertices)));
		squareVB->SetLayout({
			{YOTO::ShaderDataType::Float3,"a_Position"},
				{YOTO::ShaderDataType::Float2,"a_TexCoord"}
			});
		m_SquareVA->AddVertexBuffer(squareVB);
		uint32_t squareIndices[6] = { 0,1,2,2,3,0 };
		YOTO::Ref<YOTO::IndexBuffer> squareIB;

		squareIB.reset((YOTO::IndexBuffer::Create(squareIndices, sizeof(squareIndices) / sizeof(uint32_t))));

		m_SquareVA->AddIndexBuffer(squareIB);

		//Flat-color test shader:
		std::string BlueShaderVertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;

		out vec3 v_Position;
		void main(){
		v_Position=a_Position;
		gl_Position =u_ViewProjection*u_Transform*vec4( a_Position,1.0);
		}
		)";
		//Fragment shader: flat color from a uniform
		std::string BlueShaderFragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;

		in vec3 v_Position;
		uniform vec3 u_Color;
		void main(){
		color=vec4(u_Color,1.0);
		}
		)";
		m_BlueShader=(YOTO::Shader::Create("FlatColor", BlueShaderVertexSource, BlueShaderFragmentSource));

		auto textureShader = m_ShaderLibrary.Load("assets/shaders/Texture.glsl");
		m_Texture=YOTO::Texture2D::Create("assets/textures/Checkerboard.png");
		m_ChernoLogo= YOTO::Texture2D::Create("assets/textures/ChernoLogo.png");
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->Bind();
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->UploadUniformInt("u_Texture", 0);
	}
	void OnImGuiRender() override {
		ImGui::Begin("设置");
		ImGui::ColorEdit3("正方形颜色", glm::value_ptr(m_SquareColor));
		ImGui::End();
	}
	void OnUpdate(YOTO::Timestep ts)override {
		//update
		m_CameraController.OnUpdate(ts);
		//YT_CLIENT_TRACE("delta time {0}s ({1}ms)", ts.GetSeconds(), ts.GetMilliseconds());
	

	
		//Render
		YOTO::RenderCommand::SetClearColor({ 0.2f, 0.2f, 0.2f, 1.0f });
		YOTO::RenderCommand::Clear();

		YOTO::Renderer::BeginScene(m_CameraController.GetCamera());
		{
			static glm::mat4 scale = glm::scale(glm::mat4(1.0f), glm::vec3(0.1f)); 
			glm::vec4  redColor(0.8f, 0.3f, 0.3f, 1.0f);
			glm::vec4  blueColor(0.2f, 0.3f, 0.8f, 1.0f);

	/*		YOTO::MaterialRef material = new YOTO::MaterialRef(m_FlatColorShader);
			YOTO::MaterialInstaceRef mi = new YOTO::MaterialInstaceRef(material);
			mi.setValue("u_Color",redColor);
			mi.setTexture("u_AlbedoMap", texture);
			squreMesh->SetMaterial(mi);*/

			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->Bind();
			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->UploadUniformFloat3("u_Color",m_SquareColor);
			for (int y = 0; y < 20; y++) {
				for (int x = 0; x <20; x++)
				{

					glm::vec3 pos(x * 0.105f, y * 0.105f, 0.0f);
					glm::mat4 transform = glm::translate(glm::mat4(1.0f), pos) * scale;
				/*	if (x % 2 == 0) {
						m_BlueShader->UploadUniformFloat4("u_Color", redColor);
					}
					else {
						m_BlueShader->UploadUniformFloat4("u_Color", blueColor);
					}*/
		

					YOTO::Renderer::Submit(m_BlueShader, m_SquareVA, transform);
				}
			}
			
			auto textureShader = m_ShaderLibrary.Get("Texture");
			m_Texture->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));
			m_ChernoLogo->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));

			//YOTO::Renderer::Submit(m_Shader, m_VertexArray);


		
		}	
		YOTO::Renderer::EndScene();

		
		//YOTO::Renderer3D::BeginScene(m_Scene);
		//YOTO::Renderer2D::BeginScene(m_Scene);
	}
	void OnEvent(YOTO::Event& e)override {

		m_CameraController.OnEvent(e);
		/*if (event.GetEventType() == YOTO::EventType::KeyPressed) {
		YOTO:: KeyPressedEvent& e = (YOTO::KeyPressedEvent&)event;
		YT_CLIENT_TRACE("ExampleLayer:{0}",(char)e.GetKeyCode());
		if (e.GetKeyCode()==YT_KEY_TAB) {
			YT_CLIENT_INFO("ExampleLayerOnEvent:TAB按下了");
		}}*/
		//YT_CLIENT_TRACE("SandBoxApp:测试event{0}", event);




	}


private:
	YOTO::ShaderLibrary m_ShaderLibrary;
	YOTO::Ref<YOTO::Shader> m_Shader;
	YOTO::Ref<YOTO::VertexArray> m_VertexArray;


	YOTO::Ref<YOTO::Shader> m_BlueShader;
	YOTO::Ref<YOTO::VertexArray> m_SquareVA;
	
	YOTO::Ref<YOTO::Texture2D> m_Texture,m_ChernoLogo;

	YOTO::OrthographicCameraController m_CameraController;
	glm::vec3 m_SquareColor = { 0.2f,0.3f,0.7f };

};


class Sandbox:public YOTO::Application
{
public:
	Sandbox(){
		PushLayer(new ExampleLayer());
		//PushLayer(new YOTO::ImGuiLayer());
	}
	~Sandbox() {

	}

private:

};

YOTO::Application* YOTO::CreateApplication() {
	printf("helloworld");
	return new Sandbox();
}

Test:

 cool

Changes:

Add a method for resizing the viewport:

Renderer.h:

#pragma once
#include"RenderCommand.h"
#include "OrthographicCamera.h"
#include"Shader.h"
namespace YOTO {

	class Renderer {
	public:


		static void Init();
		static void OnWindowResize(uint32_t width,uint32_t height);
		static void BeginScene(OrthographicCamera& camera);
		static void EndScene();

		static void Submit(const Ref<Shader>& shader, const Ref<VertexArray>& vertexArray,const glm::mat4&transform = glm::mat4(1.0f));
		
		inline static RendererAPI::API GetAPI() {
			return RendererAPI::GetAPI();
		}
	private:
		struct SceneData {
			glm::mat4 ViewProjectionMatrix;
		};
		static SceneData* m_SceneData;
	};

}

Renderer.cpp:

#include"ytpch.h"
#include"Renderer.h"
#include <Platform/OpenGL/OpenGLShader.h>
namespace YOTO {
	Renderer::SceneData* Renderer::m_SceneData = new Renderer::SceneData;
	void Renderer::Init()
	{
		RenderCommand::Init();
	}
	void Renderer::OnWindowResize(uint32_t width, uint32_t height)
	{
		RenderCommand::SetViewport(0, 0, width, height);
	}
	void Renderer::BeginScene(OrthographicCamera& camera)
	{
		m_SceneData->ViewProjectionMatrix = camera.GetViewProjectionMatrix();
	}
	void Renderer::EndScene()
	{
	}
	void Renderer::Submit(const Ref<Shader>& shader, const Ref<VertexArray>& vertexArray, const glm::mat4& transform)
	{
		shader->Bind();
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_ViewProjection", m_SceneData->ViewProjectionMatrix);
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_Transform", transform);
	/*	mi.Bind();*/

		vertexArray->Bind();
		RenderCommand::DrawIndexed(vertexArray);
	}
}

RenderCommand.h:

#pragma once
#include"RendererAPI.h"
namespace YOTO {
	class RenderCommand
	{
	public:
		inline static void Init() {
			s_RendererAPI->Init();
		}
		inline static void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) {
			s_RendererAPI->SetViewport(x,y,width,height);
		}

		inline static void SetClearColor(const glm::vec4& color) {
			s_RendererAPI->SetClearColor(color);
		}
		inline static void Clear() {
			s_RendererAPI->Clear();
		}
		inline static void DrawIndexed(const Ref<VertexArray>& vertexArray) {
			s_RendererAPI->DrawIndexed(vertexArray);
		}
		
	private:
		static RendererAPI* s_RendererAPI;

	};

}

RendererAPI.h:

#pragma once
#include<glm/glm.hpp>
#include "VertexArray.h"
namespace YOTO {
	class RendererAPI
	{
	public:
		enum class API {
			None = 0,
			OpenGL = 1
		};
	public:
		virtual void Init() = 0;
		virtual void SetClearColor(const glm::vec4& color)=0;
		virtual void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) = 0;
		virtual void Clear() = 0;
		virtual void DrawIndexed(const Ref<VertexArray>& vertexArray)=0;

		inline static API GetAPI() { return s_API; }
	private:
		static API s_API;
	};
}


OpenGLRendererAPI.h:

#pragma once
#include"YOTO/Renderer/RendererAPI.h"
namespace YOTO {
	class OpenGLRendererAPI:public RendererAPI
	{
	public:
		virtual void Init()override;
		virtual void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) override;
		virtual void SetClearColor(const glm::vec4& color)override;
		virtual void Clear()override;
		virtual void DrawIndexed(const Ref<VertexArray>& vertexArray) override;
	};
}


OpenGLRendererAPI.cpp:

#include "ytpch.h"
#include "OpenGLRendererAPI.h"
#include <glad/glad.h>
namespace YOTO {
	void OpenGLRendererAPI::Init()
	{
		//enable blending
		glEnable(GL_BLEND);
		//set the standard alpha blend function
		glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
	}
	void OpenGLRendererAPI::SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height)
	{
		glViewport(x, y, width, height);
	}
	void OpenGLRendererAPI::SetClearColor(const glm::vec4& color)
	{
		glClearColor(color.r, color.g, color.b, color.a);
	}
	void OpenGLRendererAPI::Clear()
	{
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	}
	void OpenGLRendererAPI::DrawIndexed(const Ref<VertexArray>& vertexArray)
	{
		glDrawElements(GL_TRIANGLES, vertexArray->GetIndexBuffer()->GetCount(), GL_UNSIGNED_INT, nullptr);
	}
}

OrthographicCameraController.h: 

#pragma once
#include "YOTO/Renderer/OrthographicCamera.h"
#include"YOTO/Core/Timestep.h"
#include "YOTO/Event/ApplicationEvent.h"
#include "YOTO/Event/MouseEvent.h"
namespace YOTO {
	class OrthographicCameraController {
	public:
		OrthographicCameraController(float aspectRatio, bool rotation = false); // aspectRatio = width / height, e.g. 1280.0f / 720.0f
		void OnUpdate(Timestep ts);
		void OnEvent(Event& e);
		OrthographicCamera& GetCamera() {
			return m_Camera;
		}
		const OrthographicCamera& GetCamera()const {
			return m_Camera;
		}
		float GetZoomLevel() const { return m_ZoomLevel; }
		void SetZoomLevel(float level) { m_ZoomLevel = level; }

	private:
		bool OnMouseScrolled(MouseScrolledEvent &e);
		bool OnWindowResized(WindowResizeEvent& e);
	private:

		float m_AspectRatio; // aspect ratio (width / height)
		float m_ZoomLevel = 1.0f;
		OrthographicCamera m_Camera;

		bool m_Rotation;

		glm::vec3 m_CameraPosition = {0.0f,0.0f,0.0f};
		float m_CameraRotation = 0.0f;
		float m_CameraTranslationSpeed = 1.0f, m_CameraRotationSpeed = 180.0f;
	};
}

OrthographicCameraController.cpp: unchanged from the listing above.

Application.h:

#pragma once
#include"Core.h"
#include"Event/Event.h"
#include"Event/ApplicationEvent.h"
#include "YOTO/Window.h"
#include"YOTO/LayerStack.h"
#include"YOTO/ImGui/ImGuiLayer.h"

#include "YOTO/Core/Timestep.h"
#include "YOTO/Renderer/OrthographicCamera.h"
namespace YOTO {
	class YOTO_API Application
	{
	public:
		Application();
		virtual ~Application();
		void Run();
		void OnEvent(Event& e);
		void PushLayer(Layer* layer);
		void PushOverlay(Layer* layer);

		inline static Application& Get() { return *s_Instance; }
		inline Window& GetWindow() { return *m_Window; }
	private:
		bool  OnWindowClosed(WindowCloseEvent& e);
		bool  OnWindowResize(WindowResizeEvent& e);
	private:
		std::unique_ptr<Window>  m_Window;
		ImGuiLayer* m_ImGuiLayer;
		bool m_Running = true;
		bool m_Minimized = false;
		LayerStack m_LayerStack;
		Timestep m_Timestep;
		float m_LastFrameTime = 0.0f;
		 

	
	private:
		static Application* s_Instance;
	};
	//Defined by the client
	Application* CreateApplication();
}

Application.cpp: 

#include"ytpch.h"
#include "Application.h"

#include"Log.h"
#include "YOTO/Renderer/Renderer.h"
#include"Input.h"
#include <GLFW/glfw3.h>


namespace YOTO {
#define BIND_EVENT_FN(x) std::bind(&x, this, std::placeholders::_1)

	Application* Application::s_Instance = nullptr;

	Application::Application()
	{

		YT_CORE_ASSERT(!s_Instance, "Application already exists!");
		s_Instance = this;
		//smart pointer owns the window
		m_Window = std::unique_ptr<Window>(Window::Creat());
		//set the event callback
		m_Window->SetEventCallback(BIND_EVENT_FN(Application::OnEvent));

		m_Window->SetVSync(false);

		Renderer::Init();

		//create the ImGui layer and push it as an overlay so it renders last
		m_ImGuiLayer = new ImGuiLayer();
		PushOverlay(m_ImGuiLayer);	
	}
	Application::~Application() {
	}
	/// <summary>
	/// All window events are dispatched here, passed in as parameter e
	/// </summary>
	/// <param name="e"></param>
	void Application::OnEvent(Event& e) {
		//dispatch to the handler that matches the event type
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<WindowCloseEvent>(BIND_EVENT_FN(Application::OnWindowClosed));
		dispatcher.Dispatch<WindowResizeEvent>(BIND_EVENT_FN(Application::OnWindowResize));
		//log the event
		YT_CORE_INFO("Application:{0}", e);
		for (auto it = m_LayerStack.end(); it != m_LayerStack.begin();) {
			(*--it)->OnEvent(e);
			if (e.m_Handled)
				break;
		}
	}

	bool Application::OnWindowClosed(WindowCloseEvent& e) {
		m_Running = false;
		return true;
	}
	bool Application::OnWindowResize(WindowResizeEvent& e)
	{
		if (e.GetWidth()==0||e.GetHeight()==0) {
			m_Minimized = true;
			return false;
		}
		m_Minimized = false;
		//resize the viewport
		Renderer::OnWindowResize(e.GetWidth(), e.GetHeight());


		return false;
	}
	void Application::Run() {
		WindowResizeEvent e(1280, 720);
		if (e.IsInCategory(EventCategoryApplication)) {
			YT_CORE_TRACE(e);
		}
		if (e.IsInCategory(EventCategoryInput)) {
			YT_CORE_ERROR(e);
		}

		while (m_Running)
		{
			float time = (float)glfwGetTime(); //Windows platform (GLFW) for now
			Timestep timestep = time - m_LastFrameTime;
			m_LastFrameTime = time;
			if (!m_Minimized) {

				for (Layer* layer : m_LayerStack) {
					layer->OnUpdate(timestep);
				}
			}	
			//ImGui rendering is driven by the Application, separate from the layer updates
			m_ImGuiLayer->Begin();

			for (Layer* layer : m_LayerStack) {
				layer->OnImGuiRender();
			}
			m_ImGuiLayer->End();

			

			m_Window->OnUpdate();
		}
	}
	void Application::PushLayer(Layer* layer) {
		m_LayerStack.PushLayer(layer);
		layer->OnAttach();
	}
	void Application::PushOverlay(Layer* layer) {
		m_LayerStack.PushOverlay(layer);
		layer->OnAttach();
	}
}

SandboxApp.cpp (only OnEvent changes; the rest of the file is identical to the listing above):

	void OnEvent(YOTO::Event& e)override {

		m_CameraController.OnEvent(e);
		if (e.GetEventType() == YOTO::EventType::WindowResize) {
			auto& re = (YOTO::WindowResizeEvent&)e;
	/*		float zoom = re.GetWidth() / 1280.0f;
			m_CameraController.SetZoomLevel(zoom);*/
		}
	}

This adds the new functionality: window resizing is now handled in OnEvent.
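So the resize path consists of two independent reactions to the same WindowResizeEvent: Application::OnWindowResize updates the GL viewport, and OrthographicCameraController::OnWindowResized recomputes the aspect ratio so geometry is not stretched. A standalone illustration of the controller's side of that math (not engine code, same formula as OnWindowResized):

// Illustrative only: how the projection bounds change when the window is resized.
#include <iostream>

int main() {
	float zoom = 1.0f;
	auto printBounds = [&](float width, float height) {
		float aspect = width / height;   // same as OnWindowResized
		std::cout << width << "x" << height
			<< " -> left/right = +/-" << aspect * zoom
			<< ", bottom/top = +/-" << zoom << "\n";
	};
	printBounds(1280.0f, 720.0f); // +/-1.77778, +/-1
	printBounds(720.0f, 720.0f);  // +/-1,       +/-1  (a square window gives square bounds)
	return 0;
}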
