日撸java_day63-65

news2024/11/25 15:24:47

文章目录

  • Booster
    • 代码
    • 运行截图

Booster

代码

package machineLearning.adaboosting;

import weka.core.Instances;

import java.io.FileReader;
import java.util.Arrays;

/**
 * ClassName: WeightedInstances
 * Package: machineLearning.adaboosting
 * Description:Weighted instances.
 *
 * @Author: luv_x_c
 * @Create: 2023/8/13 15:12
 */
public class WeightedInstances extends Instances {
    /**
     * Just the requirement of some classes, any number is ok.
     */
    private static final long serialVersionUID = 11087456L;

    /**
     * Instance weights. Kept normalized: the elements always sum to 1.
     */
    private double[] weights;

    /**
     * The first constructor.
     *
     * @param paraFileReader The given reader to read data from file.
     * @throws Exception If the data cannot be read or parsed.
     */
    public WeightedInstances(FileReader paraFileReader) throws Exception {
        super(paraFileReader);
        initializeWeights();
    }//Of the first constructor

    /**
     * The second constructor.
     *
     * @param paraInstances The given instances.
     */
    public WeightedInstances(Instances paraInstances) {
        super(paraInstances);
        initializeWeights();
    }//Of the second constructor

    /**
     * Set the last attribute as the class and give every instance the
     * uniform weight 1 / numInstances(). Extracted because both
     * constructors previously duplicated this code verbatim.
     */
    private void initializeWeights() {
        setClassIndex(numAttributes() - 1);

        // Initialize weights uniformly.
        weights = new double[numInstances()];
        double tempAverage = 1.0 / numInstances();
        Arrays.fill(weights, tempAverage);
        System.out.println("Instances weights are: " + Arrays.toString(weights));
    }//Of initializeWeights

    /**
     * Getter.
     *
     * @param paraIndex The given index.
     * @return The weight of the given index.
     */
    public double getWeight(int paraIndex) {
        return weights[paraIndex];
    }//Of getWeight

    /**
     * Adjust the weights: correctly classified instances are divided by
     * e^{paraAlpha}, misclassified ones are multiplied by it, and the
     * result is re-normalized so the weights sum to 1 again.
     *
     * @param paraCorrectArray Indicates which instances have been correctly classified.
     * @param paraAlpha        The weight of the last classifier.
     */
    public void adjustWeights(boolean[] paraCorrectArray, double paraAlpha) {
        // Step1. Calculate the multiplicative factor e^{alpha}.
        double tempIncrease = Math.exp(paraAlpha);

        // Step2. Adjust each weight and accumulate the sum for normalization.
        double tempWeightsSum = 0;
        for (int i = 0; i < weights.length; i++) {
            if (paraCorrectArray[i]) {
                weights[i] /= tempIncrease;
            } else {
                weights[i] *= tempIncrease;
            }//Of if
            tempWeightsSum += weights[i];
        }//Of for i

        // Step3. Normalize.
        for (int i = 0; i < weights.length; i++) {
            weights[i] /= tempWeightsSum;
        }//Of for i

        System.out.println("After adjusting, instances weights are: " + Arrays.toString(weights));
    }//Of adjustWeights

    /**
     * Test adjustWeights: mark every instance as correct, adjust with a
     * fixed alpha and print the result.
     */
    public void adjustWeightsTest() {
        boolean[] tempCorrectArray = new boolean[numInstances()];
        Arrays.fill(tempCorrectArray, true);

        double tempWeightError = 0.3;

        adjustWeights(tempCorrectArray, tempWeightError);

        System.out.println("After adjusting: ");

        System.out.println(toString());
    }// Of adjustWeightsTest

    /**
     * Describe this object: sizes, weights and the underlying data.
     *
     * @return The description string.
     */
    @Override
    public String toString() {
        String resultString =
                "I am a weighted Instances object.\r\n" + " I have " + numInstances() + " " +
                        "instances and " + (numAttributes() - 1) + " conditional attributes.\r\n" +
                        " My weights are: " + Arrays.toString(weights) + "\r\n" +
                        " My data are:\r\n" + super.toString();
        return resultString;
    }//Of toString

    /**
     * For unit test.
     *
     * @param args Not provided.
     */
    public static void main(String[] args) {
        WeightedInstances tempWeightInstances = null;
        String tempFileName = "E:\\java_code\\data\\sampledata\\iris.arff";

        try {
            FileReader fileReader = new FileReader(tempFileName);
            tempWeightInstances = new WeightedInstances(fileReader);
            fileReader.close();
        } catch (Exception ee) {
            System.out.println("Cannot read the file: " + tempFileName + "\r\n" + ee);
            System.exit(0);
        }//Of try

        System.out.println(tempWeightInstances.toString());

        tempWeightInstances.adjustWeightsTest();
    }//Of main
}//Of class WeightedInstances

package machineLearning.adaboosting;

import weka.core.Instance;

import java.io.FileReader;
import java.util.Arrays;

/**
 * ClassName: StumpClassifier
 * Package: machineLearning.adaboosting
 * Description:The stump classifier.
 *
 * @Author: luv_x_c
 * @Create: 2023/8/17 20:37
 */
public class StumpClassifier extends SimpleClassifier {
    /**
     * The best cut for the current attribute on weightedInstances.
     */
    double bestCut;

    /**
     * The class label for attribute value less than bestCut.
     */
    int leftLeafLabel;

    /**
     * The class label for attribute value no less than bestCut.
     */
    int rightLeafLabel;

    /**
     * The only constructor.
     *
     * @param paraWeightedInstances The given instances.
     */
    public StumpClassifier(WeightedInstances paraWeightedInstances) {
        super(paraWeightedInstances);
    }//Of the only constructor

    /**
     * Train the stump: randomly pick one attribute, then scan candidate
     * cuts (midpoints of adjacent sorted values) and keep the cut that
     * maximizes the weighted number of correctly classified instances.
     */
    public void train() {
        //Step1. Randomly choose an attribute.
        selectedAttribute = random.nextInt(numConditions);

        //Step2. Find all attribute values and sort.
        double[] tempValuesArray = new double[numInstances];
        for (int i = 0; i < tempValuesArray.length; i++) {
            tempValuesArray[i] = weightedInstances.instance(i).value(selectedAttribute);
        }//Of for i
        Arrays.sort(tempValuesArray);

        //Step3. Initialize, classify all instances as the same with the original cut.
        int tempNumLabels = numClasses;
        double[] tempLabelCountArray = new double[tempNumLabels];
        int tempCurrentLabel;

        //Step3.1 Scan all labels to obtain their weighted counts.
        for (int i = 0; i < numInstances; i++) {
            // The label of the ith instance
            tempCurrentLabel = (int) weightedInstances.instance(i).classValue();
            tempLabelCountArray[tempCurrentLabel] += weightedInstances.getWeight(i);
        }//Of for i

        //Step3.2 Find the label with the maximal count.
        double tempMaxCorrect = 0;
        int tempBestLabel = -1;
        for (int i = 0; i < tempLabelCountArray.length; i++) {
            if (tempMaxCorrect < tempLabelCountArray[i]) {
                tempMaxCorrect = tempLabelCountArray[i];
                tempBestLabel = i;
            }//Of if
        }//Of for i

        //Step3.3 The initial cut is a little smaller than the minimal value,
        // so both leaves predict the majority label.
        bestCut = tempValuesArray[0] - 0.1;
        leftLeafLabel = tempBestLabel;
        rightLeafLabel = tempBestLabel;

        // Step4. Check candidate cuts one by one.
        // Step4.1 To handle multi-class data, left and right.
        double tempCut;
        double[][] tempLabelCountMatrix = new double[2][tempNumLabels];

        for (int i = 0; i < tempValuesArray.length - 1; i++) {
            // Step4.1 Some attribute values are identical, ignore them.
            if (tempValuesArray[i] == tempValuesArray[i + 1]) {
                continue;
            }//Of if
            tempCut = (tempValuesArray[i] + tempValuesArray[i + 1]) / 2;

            // Step4.2 Scan all labels to obtain their counts wrt. the cut.
            // Initialize again since it is used many times.
            for (int j = 0; j < 2; j++) {
                for (int k = 0; k < tempNumLabels; k++) {
                    tempLabelCountMatrix[j][k] = 0;
                }//Of for k
            }//Of for j

            for (int j = 0; j < numInstances; j++) {
                // The label of the jth instance.
                tempCurrentLabel = (int) weightedInstances.instance(j).classValue();
                if (weightedInstances.instance(j).value(selectedAttribute) < tempCut) {
                    tempLabelCountMatrix[0][tempCurrentLabel] += weightedInstances.getWeight(j);
                } else {
                    tempLabelCountMatrix[1][tempCurrentLabel] += weightedInstances.getWeight(j);
                }//Of if
            }//Of for j

            // Step4.3 Left leaf: the majority (weighted) label below the cut.
            double tempLeftMaxCorrect = 0;
            int tempLeftBestLabel = 0;
            for (int j = 0; j < tempLabelCountMatrix[0].length; j++) {
                if (tempLeftMaxCorrect < tempLabelCountMatrix[0][j]) {
                    tempLeftMaxCorrect = tempLabelCountMatrix[0][j];
                    tempLeftBestLabel = j;
                }//Of if
            }//Of for j

            // Step 4.4 Right leaf: the majority (weighted) label at/above the cut.
            double tempRightMaxCorrect = 0;
            int tempRightBestLabel = 0;
            for (int j = 0; j < tempLabelCountMatrix[1].length; j++) {
                if (tempRightMaxCorrect < tempLabelCountMatrix[1][j]) {
                    tempRightMaxCorrect = tempLabelCountMatrix[1][j];
                    tempRightBestLabel = j;
                }//Of if
            }//Of for j

            // Step 4.5 Compare with the current best.
            if (tempMaxCorrect < tempLeftMaxCorrect + tempRightMaxCorrect) {
                tempMaxCorrect = tempLeftMaxCorrect + tempRightMaxCorrect;
                bestCut = tempCut;
                leftLeafLabel = tempLeftBestLabel;
                rightLeafLabel = tempRightBestLabel;
            }//Of if
        }//Of for i

        // Fixed the formerly malformed message (missing " = " separators).
        System.out.println("Attribute = " + selectedAttribute + ", cut = " + bestCut + ", " +
                "leftLeafLabel = " + leftLeafLabel + ", rightLeafLabel = " + rightLeafLabel);
    }//Of train

    /**
     * Classify an instance: compare the selected attribute with the cut.
     *
     * @param paraInstance The given instance.
     * @return The leaf label on the instance's side of the cut.
     */
    @Override
    public int classify(Instance paraInstance) {
        int resultLabel = -1;
        if (paraInstance.value(selectedAttribute) < bestCut) {
            resultLabel = leftLeafLabel;
        } else {
            resultLabel = rightLeafLabel;
        }//Of if
        return resultLabel;
    }//Of classify

    /**
     * Describe the stump: attribute, cut, leaf labels and training quality.
     *
     * @return The description string.
     */
    @Override
    public String toString() {
        return "I am a stump classifier.\r\n" + "I choose attribute #" + selectedAttribute
                + " with cut value " + bestCut + ".\r\n" + "The left and right leaf labels are " + leftLeafLabel
                + " and " + rightLeafLabel + ", respectively.\r\n" + "My weighted error is: " + computeWeightedError()
                + ".\r\n" + "My weighted accuracy is : " + computeTrainingAccuracy() + ".";
    }//Of toString

    /**
     * For unit test.
     *
     * @param args Not used.
     */
    public static void main(String[] args) {
        WeightedInstances tempWeightedInstance = null;
        String tempFileName = "E:\\java_code\\data\\sampledata\\iris.arff";
        try {
            FileReader fileReader = new FileReader(tempFileName);
            tempWeightedInstance = new WeightedInstances(fileReader);
            fileReader.close();
        } catch (Exception e) {
            System.out.println("Cannot read the file: " + tempFileName + "\r\n" + e);
            System.exit(0);
        }//Of try

        StumpClassifier tempClassifier = new StumpClassifier(tempWeightedInstance);
        tempClassifier.train();
        System.out.println(tempClassifier);

        System.out.println(Arrays.toString(tempClassifier.computeCorrectnessArray()));
    }//Of main
}//Of class StumpClassifier

package machineLearning.adaboosting;

import weka.core.Instance;

import java.util.Random;

/**
 * ClassName: SimpleClassifier
 * Package: machineLearning.adaboosting
 * Description:The super class of any simple classifier.
 *
 * @Author: luv_x_c
 * @Create: 2023/8/14 13:43
 */
public abstract class SimpleClassifier {
    /**
     * The index of the currently selected attribute.
     */
    int selectedAttribute;

    /**
     * Weighted data.
     */
    WeightedInstances weightedInstances;

    /**
     * The accuracy on the training set.
     */
    double trainingAccuracy;

    /**
     * The number of instances.
     */
    int numInstances;

    /**
     * The number of classes.
     */
    int numClasses;

    /**
     * The number of conditional attributes.
     */
    int numConditions;

    /**
     * Random number generator, e.g. for attribute selection in subclasses.
     */
    Random random = new Random();

    /**
     * The first constructor.
     *
     * @param paraWeightedInstances The given instances.
     */
    public SimpleClassifier(WeightedInstances paraWeightedInstances) {
        weightedInstances = paraWeightedInstances;

        numConditions = weightedInstances.numAttributes() - 1;
        numInstances = weightedInstances.numInstances();
        numClasses = weightedInstances.classAttribute().numValues();
    }// Of the first constructor

    /**
     * Train the classifier.
     */
    public abstract void train();

    /**
     * Classify an instance.
     *
     * @param paraInstance The given instance.
     * @return Predicted label.
     */
    public abstract int classify(Instance paraInstance);

    /**
     * Determine which instances in the training set are correctly classified.
     *
     * @return The correctness array.
     */
    public boolean[] computeCorrectnessArray() {
        boolean[] resultCorrectnessArray = new boolean[weightedInstances.numInstances()];
        for (int i = 0; i < resultCorrectnessArray.length; i++) {
            Instance tempInstance = weightedInstances.instance(i);
            if ((int) (tempInstance.classValue()) == classify(tempInstance)) {
                resultCorrectnessArray[i] = true;
            }// OF if
        }//Of for i

        return resultCorrectnessArray;
    }//Of computeCorrectnessArray

    /**
     * Compute the accuracy on the training set. Every instance counts
     * equally here; instance weights are NOT used.
     *
     * @return The training accuracy.
     */
    public double computeTrainingAccuracy() {
        double tempCorrect = 0;
        boolean[] tempCorrectnessArray = computeCorrectnessArray();
        for (int i = 0; i < tempCorrectnessArray.length; i++) {
            if (tempCorrectnessArray[i]) {
                tempCorrect++;
            }//Of if
        }//Of for i

        double resultAccuracy = tempCorrect / tempCorrectnessArray.length;

        return resultAccuracy;
    }//Of computeTrainingAccuracy

    /**
     * Compute the weighted error: the sum of the weights of misclassified
     * instances. Clamped below by 1e-6 so callers can safely divide by it.
     *
     * @return The weighted error, at least 1e-6.
     */
    public double computeWeightedError(){
        double resultError=0;
        boolean[]tempCorrectnessArray=computeCorrectnessArray();
        for (int i = 0; i < tempCorrectnessArray.length; i++) {
            if(!tempCorrectnessArray[i]){
                resultError+=weightedInstances.getWeight(i);
            }//Of if
        }//Of for i

        // Avoid a zero error, which would blow up log(1/error - 1) in the booster.
        if(resultError<1e-6){
            resultError=1e-6;
        }//Of if

        return resultError;
    }//Of computeWeightedError
}//Of class SimpleClassifier
package machineLearning.adaboosting;

import weka.core.Instance;
import weka.core.Instances;

import java.io.FileReader;
import java.io.IOException;

/**
 * ClassName: Booster
 * Package: machineLearning.adaboosting
 * Description: The booster which ensembles base classifiers.
 *
 * @Author: luv_x_c
 * @Create: 2023/8/18 14:34
 */
public class Booster {
    /**
     * Base classifiers of the ensemble.
     */
    SimpleClassifier[] classifiers;

    /**
     * Number of classifiers trained so far (may be less than the array
     * length when training stops early after convergence).
     */
    int numClassifiers;

    /**
     * Whether to stop once the training accuracy reaches (almost) 1.
     */
    boolean stopAfterConverge = false;

    /**
     * The voting weights (alpha values) of the classifiers.
     */
    double[] classifierWeights;

    /**
     * The training data.
     */
    Instances trainingData;

    /**
     * The testing data.
     */
    Instances testingData;

    /**
     * The first constructor. The testing set is the same as the training data.
     *
     * @param paraTrainingFileName The data file name.
     */
    public Booster(String paraTrainingFileName) {
        // Step1. Read the training set.
        try {
            FileReader fileReader = new FileReader(paraTrainingFileName);
            trainingData = new Instances(fileReader);
            fileReader.close();
        } catch (IOException e) {
            System.out.println("Cannot read the file: " + paraTrainingFileName + "\r\n" + e);
            System.exit(0);
        }//Of try-catch

        // Step2. Set the last attribute as the class index
        trainingData.setClassIndex(trainingData.numAttributes() - 1);

        // Step3. The testing data is the same as the training data
        testingData = trainingData;

        stopAfterConverge = true;

        System.out.println("****************Data**********\r\n" + trainingData);
    }//Of the first constructor

    /**
     * Set the number of base classifiers, and allocate space for them.
     * Must be called before train().
     *
     * @param paraNumBaseClassifiers The number of base classifiers.
     */
    public void setNumBaseClassifiers(int paraNumBaseClassifiers) {
        numClassifiers = paraNumBaseClassifiers;

        // Step1. Allocate space for classifiers
        classifiers = new SimpleClassifier[numClassifiers];

        // Step2. Initialize classifier weights
        classifierWeights = new double[numClassifiers];
    }//Of setNumBaseClassifiers

    /**
     * Train the booster: repeatedly reweight the data according to the
     * previous classifier's mistakes and fit a new stump (AdaBoost).
     * Requires setNumBaseClassifiers() to have been called first.
     */
    public void train() {
        // Step1. Initialize.
        WeightedInstances tempWeightedInstances = null;
        double tempError;
        numClassifiers = 0;

        // Step2. Build classifiers one by one.
        for (int i = 0; i < classifiers.length; i++) {
            // Step2.1 Key code: Construct or adjust the weightedInstances
            if (i == 0) {
                tempWeightedInstances = new WeightedInstances(trainingData);
            } else {
                // Adjust the weights of the data
                tempWeightedInstances.adjustWeights(classifiers[i - 1].computeCorrectnessArray(),
                        classifierWeights[i - 1]);
            }//Of if

            // Step 2.2 Train the next classifier.
            classifiers[i] = new StumpClassifier(tempWeightedInstances);
            classifiers[i].train();

            tempError = classifiers[i].computeWeightedError();

            // Key code: Set the classifier weight, alpha = 0.5 * ln(1/e - 1).
            classifierWeights[i] = 0.5 * Math.log(1 / tempError - 1);
            // A near-useless classifier (error close to 0.5) gets weight 0.
            if (classifierWeights[i] < 1e-6) {
                classifierWeights[i] = 0;
            }//Of if

            System.out.println("Classifier #" + i + " , weighted error = " + tempError + " , " +
                    "weight = " + classifierWeights[i] + "\r\n");

            numClassifiers++;

            // Stop early when the ensemble accuracy is high enough.
            if (stopAfterConverge) {
                double tempTrainAccuracy = computeTrainingAccuracy();
                System.out.println("The accuracy of the booster is: " + tempTrainAccuracy + "\r\n");
                if (tempTrainAccuracy > 0.99999) {
                    System.out.println("Stop at the round: " + i + " due to converge.\r\n");
                    break;
                }//Of if
            }//of if
        }//Of for i
    }//Of train

    /**
     * Classify an instance by weighted voting of the trained classifiers.
     *
     * @param paraInstance The given instance
     * @return The predicted label
     */
    public int classify(Instance paraInstance) {
        double[] tempLabelCountArray = new double[testingData.classAttribute().numValues()];
        for (int i = 0; i < numClassifiers; i++) {
            int tempLabel = classifiers[i].classify(paraInstance);
            tempLabelCountArray[tempLabel] += classifierWeights[i];
        }//Of for i

        // The label with the largest accumulated vote wins.
        int resultLabel = -1;
        double tempMax = -1;
        for (int i = 0; i < tempLabelCountArray.length; i++) {
            if (tempMax < tempLabelCountArray[i]) {
                tempMax = tempLabelCountArray[i];
                resultLabel = i;
            }//Of if
        }//Of for i

        return resultLabel;
    }//Of classify

    /**
     * Test the booster on the testing data (here: the training data).
     *
     * @return The classification accuracy.
     */
    public double test() {
        System.out.println("Testing on " + testingData.numInstances() + " instances\r\n");

        return test(testingData);
    }//Of test

    /**
     * Test the booster.
     *
     * @param paraInstances The testing set.
     * @return The classification accuracy.
     */
    public double test(Instances paraInstances) {
        double tempCorrect = 0;
        paraInstances.setClassIndex(paraInstances.numAttributes() - 1);

        for (int i = 0; i < paraInstances.numInstances(); i++) {
            Instance tempInstance = paraInstances.instance(i);
            if (classify(tempInstance) == (int) tempInstance.classValue()) {
                tempCorrect++;
            }//Of if
        }//Of for i

        double resultAccuracy = tempCorrect / paraInstances.numInstances();
        System.out.println("The accuracy is: " + resultAccuracy);

        return resultAccuracy;
    }//Of test

    /**
     * Compute the training accuracy of the booster. It is not weighted.
     *
     * @return The training accuracy.
     */
    public double computeTrainingAccuracy() {
        double tempCorrect = 0;

        for (int i = 0; i < trainingData.numInstances(); i++) {
            if (classify(trainingData.instance(i)) == (int) trainingData.instance(i).classValue()) {
                tempCorrect++;
            }//Of if
        }//Of for i

        double tempAccuracy = tempCorrect / trainingData.numInstances();
        return tempAccuracy;
    }//Of computeTrainingAccuracy

    /**
     * The entrance of the program.
     *
     * @param args Not used now.
     */
    public static void main(String[] args) {
        System.out.println("Starting Adaboosting ..");
        Booster tempBooster = new Booster("E:\\java_code\\data\\sampledata\\iris.arff");

        tempBooster.setNumBaseClassifiers(5);
        tempBooster.train();

        System.out.println("The training accuracy is: " + tempBooster.computeTrainingAccuracy());
        tempBooster.test();
    }//Of main
}//Of class Booster

运行截图

在这里插入图片描述

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.coloradmin.cn/o/904043.html

如若内容造成侵权/违法违规/事实不符,请联系多彩编程网进行投诉反馈,一经查实,立即删除!

相关文章

计算机提示mfc120u.dll缺失(找不到)怎么解决

在计算机领域，mfc120u.dll是一个重要的动态链接库文件。它包含了Microsoft Foundation Class (MFC) 库的特定版本，用于支持Windows操作系统中的应用程序开发。修复mfc120u.dll可能涉及到解决与该库相关的问题或错误。这可能包括程序崩溃、运行时错误或其…

DAY23

题目一 给定一个全是小写字母的字符串str.删除多余字符，使得每种字符只保留一个，并让最终结果字符串的字典序最小 str "acbc"， 删掉第一个c， 得到"abc", 是所有结果字符串中字典序最小的。str "dbcacbc…

Python入门教程 | Python简介和环境搭建

Python 简介 Python是一种高级编程语言，由荷兰人Guido van Rossum于1991年创建。它以其简单易学、可读性强和丰富的生态系统而受到广泛喜爱。它被广泛应用于各个领域，包括Web开发、科学计算、数据分析、人工智能等。 Python的特点 简洁易读：…

idea新建web项目

步骤一 步骤二 步骤三 新建两个目录lib、classes 步骤四 设置两个目录的功能lib、classes 步骤五 发布到tomcat

docker项目实战

1、使用mysql:5.6和 owncloud 镜像&#xff0c;构建一个个人网盘 1&#xff09;拉取mysql:5.6和owncloud镜像 [rootmaster ~]# docker pull mysql:5.6 5.6: Pulling from library/mysql 35b2232c987e: Pull complete fc55c00e48f2: Pull complete 0030405130e3: Pull compl…

MetaMask Mobile +Chrome DevTools 调试Web3应用教程

注&#xff1a;本教程来源网络&#xff0c;根据项目做的整理 写好了WEB3应用&#xff0c;在本地调试用得好好的&#xff0c;但是用钱包软件访问就报莫名的错&#xff0c;但是又不知道是什么原因&#xff0c;排查的过程非常浪费时间 。 因此在本地同一局域网进行调试就非常有必要…

河北人事档案管理系统

河北人事档案管理系统是一个集数字化管理、高效服务、安全可靠于一体的人事档案管理平台&#xff0c;可以集中管理机关事业单位人事档案、农村党员档案、参保职工档案、流动人才档案等&#xff0c;并实现高效、便捷的查阅和调阅服务。 河北人事档案管理系统的建设主要是为了更好…

【C++】模拟实现哈希(闭散列和开散列两种方式)

哈希 前言正式开始map、set 与 unordered_map、unordered_set 的不同遍历结果不同查找速度不同 哈希闭散列概念介绍模拟实现字符串等自定义类型找位置字符串哈希算法二次探测 开散列概念介绍模拟实现存储自定义类型哈希表大小设置为素数 前言 在C98中&#xff0c;STL提供了底层…

论文学习——FOLEY SOUND SYNTHESIS AT THE DCASE 2023 CHALLENGE(声音生成介绍)

文章目录 引言正文AbstractIntroduction问题 2 Problem And Task Definition3. Official Dataset And Baseline第一部分问题 4. Evaluation问题 4.1 Step 1&#xff1a;Objective Evaluation问题 4.2 Step 2: Subjective Evaluation问题 4.3 Execution&#xff08;非重点&#…

实验一 ubuntu 网络环境配置

ubuntu 网络环境配置 【实验目的】 掌握 ubuntu 下网络配置的基本方法&#xff0c;能够通过有线网络连通 ubuntu 和开发板 【实验环境】 ubuntu 14.04 发行版FS4412 实验平台 【注意事项】 实验步骤中以“$”开头的命令表示在 ubuntu 环境下执行&#xff0c;以“#”开头的…

华为OD机试 - ABR 车路协同场景 - (Java 2023 B卷 100分)

目录 专栏导读一、题目描述1、问题2、条件3、原型 二、输入描述三、输出描述四、Java算法源码五、效果展示1、输入2、输出 华为OD机试 2023B卷题库疯狂收录中&#xff0c;刷题点这里 专栏导读 本专栏收录于《华为OD机试&#xff08;JAVA&#xff09;真题&#xff08;A卷B卷&am…

七夕特辑(一)浪漫表白方式 用神经网络生成一首情诗

目录 一、准备工作二、用神经网络生成一首诗&#xff0c;代码说明 牛郎织女相会&#xff0c;七夕祝福要送来。祝福天下有情人&#xff0c;终成眷属永相伴。 七夕是中国传统的情人节&#xff0c;也是恋人们表达爱意的好时机。在这个特别的日子里&#xff0c;送上温馨的祝福&…

idea创建javaweb项目,jboss下没有web application

看看下图这个地方有没有web application

mybatis入门环境搭建及CRUD

一、MyBatis介绍 二、MyBatis环境搭建 创建一个maven项目&#xff0c;名为mybatis01&#xff0c;如下&#xff1a; 2.1 pom.xml修改 代码如下&#xff1a; <?xml version"1.0" encoding"UTF-8"?><project xmlns"http://maven.apache.o…

Java-抽象类和接口(下)

接口使用实例 给对象数组排序 两个学生对象的大小关系怎么确定? 需要我们额外指定. 这里需要用到Comparable 接口 在Comparable 接口内部有一个compareTo 的方法&#xff0c;我们需要实现它 在下图中&#xff0c;我们需要将o强制转换为Student 之后调用Arrays.sort(array)即…

电商项目part04 微服务拆分

微服务架构拆分 微服务介绍 英文:https://martinfowler.com/articles/microservices.html 中文:http://blog.cuicc.com/blog/2015/07/22/microservices 微服务拆分时机 如下场景是否需要进行微服务拆分&#xff1f; 代码维护困难&#xff0c;几百人同时开发一个模块&…

01 背包算法

描述 王强决定把年终奖用于购物&#xff0c;他把想买的物品分为两类&#xff1a;主件与附件&#xff0c;附件是从属于某个主件的&#xff0c;下表就是一些主件与附件的例子&#xff1a; 主件附件电脑打印机&#xff0c;扫描仪书柜图书书桌台灯&#xff0c;文具工作椅无 如果…

漏洞指北-VulFocus靶场专栏-中级02

漏洞指北-VulFocus靶场专栏-中级02 中级005 &#x1f338;thinkphp lang 命令执行&#xff08;thinkphp:6.0.12&#xff09;&#x1f338;step1&#xff1a;burp suite 抓包 修改请求头step2 修改成功&#xff0c;访问shell.php 中级006 &#x1f338;Metabase geojson任意文件…

Linux内核源码分析-内存管理

Linux内核内存布局 64位Linux系统一般使用48位表示虚拟地址空间&#xff0c;45位表示物理地址。通过命令&#xff1a;cat /proc/cpuinfo。查看Linux内核位数和proc文件系统输出系统软硬件信息如下&#xff1a; x86_64架构体系内核分布情况 通过 cat /proc/meminfo 输出系统架…