# Demonstration of common functions in Python's `random` module.
import random


def main():
    """Print one sample from each of several `random` generators.

    No seed is set, so the output differs on every run.
    """
    # Random float in the half-open interval [0.0, 1.0).
    a = random.random()
    print("a = ", a)

    # Random integer in the CLOSED interval [0, 101] — randint includes both endpoints.
    b = random.randint(0, 101)
    print("b = ", b)

    # Random float roughly in [0, 10); uniform() may include the upper bound due to rounding.
    c = random.uniform(0, 10)
    print("c = ", c)

    # Random even integer in [0, 100]: start 0, stop 101 (exclusive), step 2.
    d = random.randrange(0, 101, 2)
    print("d = ", d)

    # One character chosen at random from the string.
    e = random.choice('abcdefg&#%^*f')
    print("e = ", e)

    # One string chosen at random from the list.
    f = random.choice(['apple', 'pear', 'peach', 'orange', 'lemon'])
    print("f = ", f)

    # Three distinct characters sampled without replacement.
    g = random.sample('abcdefghijklmnopqrstuvwxyz', 3)
    print("g = ", g)

    # shuffle() reorders a list IN PLACE and returns None.
    num = [9, 6, 4, 0, 2, 5, 3, 7, 1, 8]
    num1 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    random.shuffle(num)
    print("num = ", num)
    random.shuffle(num1)
    print("num1 = ", num1)


if __name__ == "__main__":
    main()
示例输出（每次运行的随机结果都不同）:
a = 0.5761180882798892
b = 96
c = 8.111252459197054
d = 26
e = e
f = apple
g = ['d', 'a', 'j']
num = [4, 3, 1, 2, 5, 8, 0, 7, 9, 6]
num1 = [7, 1, 6, 3, 2, 8, 10, 5, 4, 9]
参考: Attention Is All You Need 论文解读:Attention is All you need Transformer模型中的attention结构作用是什么? 如何最简单、通俗地理解Transformer? Transformer 新型神经网络,基于注意力机制 的 编码器-解码器 的序列处…
一维前缀和[模板]
一、题目描述 OJ题目链接:【模板】前缀和_牛客题霸_牛客网
二、思路解析 三、代码
#include <iostream>
using namespace std;
// Capacity bound for the input array (1e5 elements plus headroom for 1-based indexing).
// NOTE: original line was `const int N 100010;` — the missing `=` is a syntax error.
const int N = 100010;
// arr: raw input values; dp: prefix-sum table (presumably dp[i] = arr[1] + ... + arr[i]
// in the standard 1-indexed template — TODO confirm against the full main(), which is
// truncated in this view).
long long arr[N], dp[N];
int n, q;  // n: number of elements, q: number of range-sum queries
int main()
{cin >> n >> q;// 读取…