Three.js Mirror Reflection (Reflector): Adding Reflector Capability to MeshStandardMaterial

[Screenshots: effect of the modified reflector, and the official Reflector example for comparison]

Difference: the official example behaves like a plain mirror. The reflective surface has no texture or other material properties, and its opacity cannot be adjusted.
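For reference, the stock addon is used roughly like this; it replaces the whole mesh rather than augmenting an existing material (a minimal sketch, assuming a `scene` variable and arbitrary sizes):

import * as THREE from "three";
import { Reflector } from "three/addons/objects/Reflector.js";

// A pure mirror: the surface shows only the reflected scene.
const mirror = new Reflector(new THREE.PlaneGeometry(2, 2), {
    clipBias: 0.003,
    textureWidth: 1024,
    textureHeight: 1024,
    color: 0x888888,
});
mirror.rotateX(Math.PI / -2);
scene.add(mirror); // `scene` is assumed to exist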

The Reflector source below was modified so that a MeshStandardMaterial renders the reflection while keeping its own map, normal map, transparency, and other standard properties.

Usage example

createReflector() {
        // Reflective floor: a small, semi-transparent textured plane.
        const plane = this.helper.create.plane(2, 2);
        this.helper.add(plane.mesh);
        plane.mesh.rotateX(Math.PI / -2);
        plane.mesh.position.y -= 0.5;

        plane.mesh.material = new THREE.MeshStandardMaterial({
            map: this.helper.loadTexture(
                "/public/textures/wallhaven-kxj3l1_840x840.png",
                (t) => {
                    t.colorSpace = THREE.SRGBColorSpace;
                }
            ),
            transparent: true,
            opacity: 0.3,
        });
        // Attach the reflection pass to the standard material.
        addReflectorEffect(plane.mesh);

        {
            // Reflective wall: a large plane with both a color map and a normal map.
            const plane = this.helper.create.plane(100, 100);
            this.helper.add(plane.mesh);
            plane.mesh.rotateY(Math.PI / 2);
            plane.mesh.position.x -= 1.5;
            plane.mesh.material = new THREE.MeshStandardMaterial({
                map: this.helper.loadTexture(
                    "/public/textures/building.png",
                    (t) => {
                        t.colorSpace = THREE.SRGBColorSpace;
                    }
                ),
                normalMap: this.helper.loadTexture(
                    "/public/textures/wallhaven-kxj3l1_840x840.png",
                    (t) => {
                        t.colorSpace = THREE.SRGBColorSpace;
                    }
                ),
            });
            addReflectorEffect(plane.mesh);
        }
    }
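The this.helper calls above (create.plane, loadTexture, add) come from the author's own scaffolding. A rough equivalent in plain three.js might look like the following sketch, where the `scene` variable, the import path of the modified module, and the texture path are assumptions:

import * as THREE from "three";
import { addReflectorEffect } from "./Reflector.js"; // the modified source shown below

const textureLoader = new THREE.TextureLoader();
function loadSRGBTexture(url) {
    const t = textureLoader.load(url);
    t.colorSpace = THREE.SRGBColorSpace;
    return t;
}

// Semi-transparent reflective floor built directly from MeshStandardMaterial.
const floor = new THREE.Mesh(
    new THREE.PlaneGeometry(2, 2),
    new THREE.MeshStandardMaterial({
        map: loadSRGBTexture("/textures/floor.png"), // hypothetical path
        transparent: true,
        opacity: 0.3,
    })
);
floor.rotateX(Math.PI / -2);
floor.position.y -= 0.5;
scene.add(floor); // `scene` is assumed to exist

addReflectorEffect(floor);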

Source:

import {
    Color,
    Matrix4,
    Mesh,
    PerspectiveCamera,
    Plane,
    ShaderMaterial,
    UniformsUtils,
    Vector3,
    Vector4,
    WebGLRenderTarget,
    HalfFloatType,
} from "three";

class Reflector extends Mesh {
    constructor(geometry, options = {}) {
        super(geometry);

        this.isReflector = true;

        this.type = "Reflector";
        this.camera = new PerspectiveCamera();

        const scope = this;

        const color =
            options.color !== undefined
                ? new Color(options.color)
                : new Color(0x7f7f7f);
        const textureWidth = options.textureWidth || 512;
        const textureHeight = options.textureHeight || 512;
        const clipBias = options.clipBias || 0;
        const shader = options.shader || Reflector.ReflectorShader;
        const multisample =
            options.multisample !== undefined ? options.multisample : 4;

        //

        const reflectorPlane = new Plane();
        const normal = new Vector3();
        const reflectorWorldPosition = new Vector3();
        const cameraWorldPosition = new Vector3();
        const rotationMatrix = new Matrix4();
        const lookAtPosition = new Vector3(0, 0, -1);
        const clipPlane = new Vector4();

        const view = new Vector3();
        const target = new Vector3();
        const q = new Vector4();

        const textureMatrix = new Matrix4();
        const virtualCamera = this.camera;

        const renderTarget = new WebGLRenderTarget(
            textureWidth,
            textureHeight,
            { samples: multisample, type: HalfFloatType }
        );

        const material = new ShaderMaterial({
            name: shader.name !== undefined ? shader.name : "unspecified",
            uniforms: UniformsUtils.clone(shader.uniforms),
            fragmentShader: shader.fragmentShader,
            vertexShader: shader.vertexShader,
            transparent: true,
        });

        material.uniforms["tDiffuse"].value = renderTarget.texture;
        material.uniforms["_opacity"].value = options.opacity || 1;
        material.uniforms["color"].value = color;
        material.uniforms["textureMatrix"].value = textureMatrix;

        this.material = material;
        this.count = 0;
        this.onBeforeRender = (renderer, scene, camera) => {
            this.count++;

            // if (this.count % 4 === 0) {
            //     return;
            // }

            reflectorWorldPosition.setFromMatrixPosition(scope.matrixWorld);
            cameraWorldPosition.setFromMatrixPosition(camera.matrixWorld);

            rotationMatrix.extractRotation(scope.matrixWorld);

            normal.set(0, 0, 1);
            normal.applyMatrix4(rotationMatrix);

            view.subVectors(reflectorWorldPosition, cameraWorldPosition);

            // Avoid rendering when reflector is facing away

            if (view.dot(normal) > 0) return;

            view.reflect(normal).negate();
            view.add(reflectorWorldPosition);

            rotationMatrix.extractRotation(camera.matrixWorld);

            lookAtPosition.set(0, 0, -1);
            lookAtPosition.applyMatrix4(rotationMatrix);
            lookAtPosition.add(cameraWorldPosition);

            target.subVectors(reflectorWorldPosition, lookAtPosition);
            target.reflect(normal).negate();
            target.add(reflectorWorldPosition);

            virtualCamera.position.copy(view);
            virtualCamera.up.set(0, 1, 0);
            virtualCamera.up.applyMatrix4(rotationMatrix);
            virtualCamera.up.reflect(normal);
            virtualCamera.lookAt(target);

            virtualCamera.far = camera.far; // Used in WebGLBackground

            virtualCamera.updateMatrixWorld();
            virtualCamera.projectionMatrix.copy(camera.projectionMatrix);

            // Update the texture matrix
            textureMatrix.set(
                0.5,
                0.0,
                0.0,
                0.5,
                0.0,
                0.5,
                0.0,
                0.5,
                0.0,
                0.0,
                0.5,
                0.5,
                0.0,
                0.0,
                0.0,
                1.0
            );
            textureMatrix.multiply(virtualCamera.projectionMatrix);
            textureMatrix.multiply(virtualCamera.matrixWorldInverse);
            textureMatrix.multiply(scope.matrixWorld);

            // Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
            // Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
            reflectorPlane.setFromNormalAndCoplanarPoint(
                normal,
                reflectorWorldPosition
            );
            reflectorPlane.applyMatrix4(virtualCamera.matrixWorldInverse);

            clipPlane.set(
                reflectorPlane.normal.x,
                reflectorPlane.normal.y,
                reflectorPlane.normal.z,
                reflectorPlane.constant
            );

            const projectionMatrix = virtualCamera.projectionMatrix;

            q.x =
                (Math.sign(clipPlane.x) + projectionMatrix.elements[8]) /
                projectionMatrix.elements[0];
            q.y =
                (Math.sign(clipPlane.y) + projectionMatrix.elements[9]) /
                projectionMatrix.elements[5];
            q.z = -1.0;
            q.w =
                (1.0 + projectionMatrix.elements[10]) /
                projectionMatrix.elements[14];

            // Calculate the scaled plane vector
            clipPlane.multiplyScalar(2.0 / clipPlane.dot(q));

            // Replacing the third row of the projection matrix
            projectionMatrix.elements[2] = clipPlane.x;
            projectionMatrix.elements[6] = clipPlane.y;
            projectionMatrix.elements[10] = clipPlane.z + 1.0 - clipBias;
            projectionMatrix.elements[14] = clipPlane.w;

            // Render
            scope.visible = false;

            const currentRenderTarget = renderer.getRenderTarget();

            const currentXrEnabled = renderer.xr.enabled;
            const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;

            renderer.xr.enabled = false; // Avoid camera modification
            renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows

            renderer.setRenderTarget(renderTarget);

            renderer.state.buffers.depth.setMask(true); // make sure the depth buffer is writable so it can be properly cleared, see #18897

            if (renderer.autoClear === false) renderer.clear();

            // filter: temporarily hide any scene objects named in options.filter
            // so they do not show up in the reflection.
            const filtered = (options.filter || []).map((name) =>
                scene.getObjectByName(name)
            );

            filtered.forEach((obj) => {
                if (obj) obj.visible = false;
            });

            renderer.render(scene, virtualCamera);

            filtered.forEach((obj) => {
                if (obj) obj.visible = true;
            });

            renderer.xr.enabled = currentXrEnabled;
            renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;

            renderer.setRenderTarget(currentRenderTarget);

            // Restore viewport

            const viewport = camera.viewport;

            if (viewport !== undefined) {
                renderer.state.viewport(viewport);
            }

            scope.visible = true;
        };

        this.getRenderTarget = function () {
            return renderTarget;
        };

        this.dispose = function () {
            renderTarget.dispose();
            scope.material.dispose();
        };
    }
}

Reflector.ReflectorShader = {
    name: "ReflectorShader",

    uniforms: {
        color: {
            value: null,
        },

        tDiffuse: {
            value: null,
        },

        textureMatrix: {
            value: null,
        },
        _opacity: {
            value: null,
        },
    },

    vertexShader: /* glsl */ `
		uniform mat4 textureMatrix;
		varying vec4 vUv;

		#include <common>
		#include <logdepthbuf_pars_vertex>

		void main() {

			vUv = textureMatrix * vec4( position, 1.0 );

			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

			#include <logdepthbuf_vertex>

		}`,

    fragmentShader: /* glsl */ `
		uniform vec3 color;
		uniform float _opacity;
		uniform sampler2D tDiffuse;
		varying vec4 vUv;

		#include <logdepthbuf_pars_fragment>

		float blendOverlay( float base, float blend ) {

			return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );

		}

		vec3 blendOverlay( vec3 base, vec3 blend ) {

			return vec3( blendOverlay( base.r, blend.r ), blendOverlay( base.g, blend.g ), blendOverlay( base.b, blend.b ) );

		}

		void main() {

			#include <logdepthbuf_fragment>

			vec4 base = texture2DProj( tDiffuse, vUv );
			gl_FragColor = vec4( base.rgb, _opacity );

			#include <tonemapping_fragment>
			#include <colorspace_fragment>

		}`,
};

export { Reflector };
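The modified class can still be used on its own as a mirror, now reading the extra opacity and filter options it defines (a sketch; the geometry, `scene` variable, and object names are assumptions):

const mirror = new Reflector(new THREE.PlaneGeometry(2, 2), {
    textureWidth: 1024,
    textureHeight: 1024,
    clipBias: 0.003,
    opacity: 0.5,            // fed into the _opacity uniform
    filter: ["debugHelper"], // hypothetical object names hidden from the reflection
});
mirror.rotateX(Math.PI / -2);
scene.add(mirror);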

/**
 * @description: Add a planar reflection to a mesh's existing material
 * @param {Mesh} mesh mesh whose material receives the reflection pass
 * @param {Object} options { filter, opacity, textureWidth, textureHeight, clipBias, shader, multisample }
 * @return {*}
 */
export function addReflectorEffect(mesh, options = { filter: [] }) {
    const material = mesh.material;

    // material.isReflector = true;

    // material.type = "Reflector";
    const camera = new PerspectiveCamera();

    const textureWidth = options.textureWidth || 512;
    const textureHeight = options.textureHeight || 512;
    const clipBias = options.clipBias || 0;
    const shader = options.shader || Reflector.ReflectorShader;
    const multisample =
        options.multisample !== undefined ? options.multisample : 4;

    const reflectorPlane = new Plane();
    const normal = new Vector3();
    const reflectorWorldPosition = new Vector3();
    const cameraWorldPosition = new Vector3();
    const rotationMatrix = new Matrix4();
    const lookAtPosition = new Vector3(0, 0, -1);
    const clipPlane = new Vector4();

    const view = new Vector3();
    const target = new Vector3();
    const q = new Vector4();

    const textureMatrix = new Matrix4();
    const virtualCamera = camera;

    const renderTarget = new WebGLRenderTarget(textureWidth, textureHeight, {
        samples: multisample,
        type: HalfFloatType,
    });

    const appendUniforms = {
        refDiffuse: { value: renderTarget.texture },
        // refOpacity: { value: options.opacity || 1 },
        refTextureMatrix: { value: textureMatrix },
    };

    material.onBeforeCompile = (shader) => {
        // Register the extra uniforms on the compiled program.
        Object.assign(shader.uniforms, appendUniforms);

        // Declare the texture matrix and the projected UV varying in the vertex shader.
        shader.vertexShader = shader.vertexShader.replace(
            "#include <common>",
            `
            #include <common>
            uniform mat4 refTextureMatrix;
            varying vec4 refUv;
        `
        );
        // Declare the reflection texture and the varying in the fragment shader.
        shader.fragmentShader = shader.fragmentShader.replace(
            "#include <common>",
            `
            #include <common>
            uniform sampler2D refDiffuse;
            varying vec4 refUv;
        `
        );
        // Project each vertex into the reflection render target's clip space.
        shader.vertexShader = shader.vertexShader.replace(
            "#include <begin_vertex>",
            `
            #include <begin_vertex>
            refUv = refTextureMatrix * vec4( position, 1.0 );
        `
        );
        // Add the reflected color on top of the standard lighting result.
        // The opacity is baked into the shader as a float literal at compile time.
        shader.fragmentShader = shader.fragmentShader.replace(
            "#include <dithering_fragment>",
            `
            #include <dithering_fragment>

            gl_FragColor.rgb += texture2DProj( refDiffuse, refUv ).rgb;
            gl_FragColor.a = ${Number(options.opacity ?? 1).toFixed(3)};
        `
        );
    };

    mesh.material.onBeforeRender = (renderer, scene, camera) => {
        reflectorWorldPosition.setFromMatrixPosition(mesh.matrixWorld);
        cameraWorldPosition.setFromMatrixPosition(camera.matrixWorld);

        rotationMatrix.extractRotation(mesh.matrixWorld);

        normal.set(0, 0, 1);
        normal.applyMatrix4(rotationMatrix);

        view.subVectors(reflectorWorldPosition, cameraWorldPosition);

        // Avoid rendering when reflector is facing away

        if (view.dot(normal) > 0) return;

        view.reflect(normal).negate();
        view.add(reflectorWorldPosition);

        rotationMatrix.extractRotation(camera.matrixWorld);

        lookAtPosition.set(0, 0, -1);
        lookAtPosition.applyMatrix4(rotationMatrix);
        lookAtPosition.add(cameraWorldPosition);

        target.subVectors(reflectorWorldPosition, lookAtPosition);
        target.reflect(normal).negate();
        target.add(reflectorWorldPosition);

        virtualCamera.position.copy(view);
        virtualCamera.up.set(0, 1, 0);
        virtualCamera.up.applyMatrix4(rotationMatrix);
        virtualCamera.up.reflect(normal);
        virtualCamera.lookAt(target);

        virtualCamera.far = camera.far; // Used in WebGLBackground

        virtualCamera.updateMatrixWorld();
        virtualCamera.projectionMatrix.copy(camera.projectionMatrix);

        // Update the texture matrix
        textureMatrix.set(
            0.5,
            0.0,
            0.0,
            0.5,
            0.0,
            0.5,
            0.0,
            0.5,
            0.0,
            0.0,
            0.5,
            0.5,
            0.0,
            0.0,
            0.0,
            1.0
        );
        textureMatrix.multiply(virtualCamera.projectionMatrix);
        textureMatrix.multiply(virtualCamera.matrixWorldInverse);
        textureMatrix.multiply(mesh.matrixWorld);

        // Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
        // Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
        reflectorPlane.setFromNormalAndCoplanarPoint(
            normal,
            reflectorWorldPosition
        );
        reflectorPlane.applyMatrix4(virtualCamera.matrixWorldInverse);

        clipPlane.set(
            reflectorPlane.normal.x,
            reflectorPlane.normal.y,
            reflectorPlane.normal.z,
            reflectorPlane.constant
        );

        const projectionMatrix = virtualCamera.projectionMatrix;

        q.x =
            (Math.sign(clipPlane.x) + projectionMatrix.elements[8]) /
            projectionMatrix.elements[0];
        q.y =
            (Math.sign(clipPlane.y) + projectionMatrix.elements[9]) /
            projectionMatrix.elements[5];
        q.z = -1.0;
        q.w =
            (1.0 + projectionMatrix.elements[10]) /
            projectionMatrix.elements[14];

        // Calculate the scaled plane vector
        clipPlane.multiplyScalar(2.0 / clipPlane.dot(q));

        // Replacing the third row of the projection matrix
        projectionMatrix.elements[2] = clipPlane.x;
        projectionMatrix.elements[6] = clipPlane.y;
        projectionMatrix.elements[10] = clipPlane.z + 1.0 - clipBias;
        projectionMatrix.elements[14] = clipPlane.w;

        // Render
        // TODO: since the reflection is built into the material itself, ideally the mesh
        // would only skip its reflection texture here instead of being hidden entirely.
        mesh.visible = false;

        const currentRenderTarget = renderer.getRenderTarget();

        const currentXrEnabled = renderer.xr.enabled;
        const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;

        renderer.xr.enabled = false; // Avoid camera modification
        renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows

        renderer.setRenderTarget(renderTarget);

        renderer.state.buffers.depth.setMask(true); // make sure the depth buffer is writable so it can be properly cleared, see #18897

        if (renderer.autoClear === false) renderer.clear();

        // filter: temporarily hide any scene objects named in options.filter
        // so they do not show up in the reflection.
        const filtered = (options.filter || []).map((name) =>
            scene.getObjectByName(name)
        );

        filtered.forEach((obj) => {
            if (obj) obj.visible = false;
        });

        renderer.render(scene, virtualCamera);

        filtered.forEach((obj) => {
            if (obj) obj.visible = true;
        });

        renderer.xr.enabled = currentXrEnabled;
        renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;

        renderer.setRenderTarget(currentRenderTarget);

        // Restore viewport

        const viewport = camera.viewport;

        if (viewport !== undefined) {
            renderer.state.viewport(viewport);
        }

        mesh.visible = true;
    };
}
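addReflectorEffect accepts the same options object, so specific named objects can be excluded from the reflection and a reflection opacity can be baked into the injected shader code (a sketch; `wall` and the object names are hypothetical):

// `wall` is assumed to be a THREE.Mesh that already has a MeshStandardMaterial.
addReflectorEffect(wall, {
    filter: ["skyDome", "debugHelper"], // hypothetical scene object names to exclude
    opacity: 0.6,                       // alpha written by the injected fragment code
    textureWidth: 1024,
    textureHeight: 1024,
    clipBias: 0.003,
});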

