<!DOCTYPE html><html lang="zh-CN" data-theme="light"><head><meta charset="UTF-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width, initial-scale=1.0,viewport-fit=cover"><title>Cloni</title><meta name="author" content="cloni"><meta name="copyright" content="cloni"><meta name="format-detection" content="telephone=no"><meta name="theme-color" content="#ffffff"><meta property="og:type" content="website">
<meta property="og:title" content="Cloni">
<meta property="og:url" content="http://cloni418.github.io/index.html">
<meta property="og:site_name" content="Cloni">
<meta property="og:locale" content="zh_CN">
<meta property="og:image" content="http://cloni418.github.io/icon/avatar-compress.jpg">
<meta property="article:author" content="cloni">
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="http://cloni418.github.io/icon/avatar-compress.jpg"><link rel="shortcut icon" href="/icon/favicon.svg"><link rel="canonical" href="http://cloni418.github.io/index.html"><link rel="preconnect" href="//cdn.jsdelivr.net"/><link rel="stylesheet" href="/css/index.css?v=4.13.0"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@6.5.1/css/all.min.css"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fancyapps/ui@5.0.33/dist/fancybox/fancybox.min.css" media="print" onload="this.media='all'"><script>const GLOBAL_CONFIG = {
root: '/',
algolia: undefined,
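// The localSearch strings below contain ${query} and ${hits} placeholders,
// which local-search.js (loaded at the end of the page) fills in at runtime.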
localSearch: {"path":"/search.xml","preload":false,"top_n_per_article":1,"unescape":true,"languages":{"hits_empty":"找不到您查询的内容:${query}","hits_stats":"共找到 ${hits} 篇文章"}},
translate: undefined,
noticeOutdate: undefined,
highlight: {"plugin":"highlight.js","highlightCopy":true,"highlightLang":true,"highlightHeightLimit":false},
copy: {
success: '复制成功',
error: '复制错误',
noSupport: '浏览器不支持'
},
relativeDate: {
homepage: false,
post: false
},
runtime: '',
dateSuffix: {
just: '刚刚',
min: '分钟前',
hour: '小时前',
day: '天前',
month: '个月前'
},
copyright: undefined,
lightbox: 'fancybox',
Snackbar: undefined,
infinitegrid: {
js: 'https://cdn.jsdelivr.net/npm/@egjs/infinitegrid@4.11.1/dist/infinitegrid.min.js',
buttonText: '加载更多'
},
isPhotoFigcaption: false,
islazyload: true,
isAnchor: false,
percent: {
toc: true,
rightside: false,
},
autoDarkmode: false
}</script><script id="config-diff">var GLOBAL_CONFIG_SITE = {
title: 'Cloni',
isPost: false,
isHome: true,
isHighlightShrink: false,
isToc: false,
postUpdate: '2024-11-09 22:25:49'
}</script><script>(win=>{
win.saveToLocal = {
set: (key, value, ttl) => {
if (ttl === 0) return
const now = Date.now()
const expiry = now + ttl * 86400000
const item = {
value,
expiry
}
localStorage.setItem(key, JSON.stringify(item))
},
get: key => {
const itemStr = localStorage.getItem(key)
if (!itemStr) {
return undefined
}
const item = JSON.parse(itemStr)
const now = Date.now()
if (now > item.expiry) {
localStorage.removeItem(key)
return undefined
}
return item.value
}
}
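// Illustrative usage: saveToLocal.set('theme', 'dark', 2) keeps the value for
// two days, after which saveToLocal.get('theme') returns undefined again.
// getScript: dynamically injects a script element and resolves when it has
// loaded; the readyState checks are a legacy fallback for old IE, modern
// browsers simply fire onload.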
win.getScript = (url, attr = {}) => new Promise((resolve, reject) => {
const script = document.createElement('script')
script.src = url
script.async = true
script.onerror = reject
script.onload = script.onreadystatechange = function() {
const loadState = this.readyState
if (loadState && loadState !== 'loaded' && loadState !== 'complete') return
script.onload = script.onreadystatechange = null
resolve()
}
Object.keys(attr).forEach(key => {
script.setAttribute(key, attr[key])
})
document.head.appendChild(script)
})
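// getCSS: the same promise-based loader as getScript, but for stylesheets;
// the optional id lets callers locate or replace the injected sheet later.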
win.getCSS = (url, id = false) => new Promise((resolve, reject) => {
const link = document.createElement('link')
link.rel = 'stylesheet'
link.href = url
if (id) link.id = id
link.onerror = reject
link.onload = link.onreadystatechange = function() {
const loadState = this.readyState
if (loadState && loadState !== 'loaded' && loadState !== 'complete') return
link.onload = link.onreadystatechange = null
resolve()
}
document.head.appendChild(link)
})
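// Theme switchers: flip the data-theme attribute that the CSS keys off, and
// keep the meta theme-color (browser chrome color) in sync with it.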
win.activateDarkMode = () => {
document.documentElement.setAttribute('data-theme', 'dark')
if (document.querySelector('meta[name="theme-color"]') !== null) {
document.querySelector('meta[name="theme-color"]').setAttribute('content', '#0d0d0d')
}
}
win.activateLightMode = () => {
document.documentElement.setAttribute('data-theme', 'light')
if (document.querySelector('meta[name="theme-color"]') !== null) {
document.querySelector('meta[name="theme-color"]').setAttribute('content', '#ffffff')
}
}
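// Restore saved preferences synchronously, before first paint, so the page
// does not flash the wrong theme or layout.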
const t = saveToLocal.get('theme')
if (t === 'dark') activateDarkMode()
else if (t === 'light') activateLightMode()
const asideStatus = saveToLocal.get('aside-status')
if (asideStatus !== undefined) {
if (asideStatus === 'hide') {
document.documentElement.classList.add('hide-aside')
} else {
document.documentElement.classList.remove('hide-aside')
}
}
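// detectApple: a user-agent sniff that tags Apple devices with an "apple"
// class, presumably so the stylesheet can apply WebKit-specific tweaks.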
const detectApple = () => {
if(/iPad|iPhone|iPod|Macintosh/.test(navigator.userAgent)){
document.documentElement.classList.add('apple')
}
}
detectApple()
})(window)</script><link rel="stylesheet" href="/css/post_style.css"><meta name="generator" content="Hexo 7.2.0"></head><body><div id="web_bg"></div><div id="sidebar"><div id="menu-mask"></div><div id="sidebar-menus"><div class="avatar-img is-center"><img src= "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/icon/avatar-compress.jpg" onerror="onerror=null;src='/img/friend_404.gif'" alt="avatar"/></div><div class="sidebar-site-data site-data is-center"><a href="/archives/"><div class="headline">文章</div><div class="length-num">9</div></a><a href="/tags/"><div class="headline">标签</div><div class="length-num">10</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">12</div></a></div><hr class="custom-hr"/><div class="menus_items"><div class="menus_item"><a class="site-page" href="/"><i class="fa-fw fas fa-home"></i><span> Home</span></a></div><div class="menus_item"><a class="site-page" href="/archives/"><i class="fa-fw fas fa-archive"></i><span> Archives</span></a></div><div class="menus_item"><a class="site-page" href="/tags/"><i class="fa-fw fas fa-tags"></i><span> Tags</span></a></div><div class="menus_item"><a class="site-page" href="/categories/"><i class="fa-fw fas fa-folder-open"></i><span> Categories</span></a></div><div class="menus_item"><a class="site-page" href="/about/"><i class="fa-fw fas fa-heart"></i><span> About</span></a></div></div></div></div><div class="page" id="body-wrap"><header class="full_page" id="page-header" style="background-image: url('/icon/banner.jpg')"><nav id="nav"><span id="blog-info"><a href="/" title="Cloni"><span class="site-name">Cloni</span></a></span><div id="menus"><div id="search-button"><a class="site-page social-icon search" href="javascript:void(0);"><i class="fas fa-search fa-fw"></i><span> 搜索</span></a></div><div class="menus_items"><div class="menus_item"><a class="site-page" href="/"><i class="fa-fw fas fa-home"></i><span> Home</span></a></div><div class="menus_item"><a class="site-page" href="/archives/"><i class="fa-fw fas fa-archive"></i><span> Archives</span></a></div><div class="menus_item"><a class="site-page" href="/tags/"><i class="fa-fw fas fa-tags"></i><span> Tags</span></a></div><div class="menus_item"><a class="site-page" href="/categories/"><i class="fa-fw fas fa-folder-open"></i><span> Categories</span></a></div><div class="menus_item"><a class="site-page" href="/about/"><i class="fa-fw fas fa-heart"></i><span> About</span></a></div></div><div id="toggle-menu"><a class="site-page" href="javascript:void(0);"><i class="fas fa-bars fa-fw"></i></a></div></div></nav><div id="site-info"><h1 id="site-title">Cloni</h1></div><div id="scroll-down"><i class="fas fa-angle-down scroll-down-effects"></i></div></header><main class="layout" id="content-inner"><div class="recent-posts" id="recent-posts"><div class="recent-post-item"><div class="recent-post-info no-cover"><a class="article-title" href="/paper%20notes/Prompt/" title="Prompt Tuning">Prompt Tuning</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-11-05T16:00:00.000Z" title="发表于 2024-11-06 00:00:00">2024-11-06</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/work/">work</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a 
class="article-meta__categories" href="/categories/work/fine-tune/">fine-tune</a></span></div><div class="content">参考文章:万字长文 Prompt Tuning!
为什么要引入 Prompt?
Prompt 的经典方法的发展
1. 为什么要引入 prompt?
prompt 旨在解决传统预训练模型(BERT、GPT 等)在 Fine-tune 时的两个痛点:
降低语义差异。 预训练任务主要以 Mask Language Modeling 为主(next prediction 等效果不好,不常用),而下游任务则重新引入新的训练参数,两个阶段的训练目标具有较大差异。因此需要缩小 Pre-train 和 Fine-tune 两个阶段目标差异过大的问题。
避免过拟合。 由于 Fine-tune 需要引入额外的参数以适配相应的任务需要,因此在样本数量有限的情况下容易发生过拟合,模型泛化能力下降。
2. Prompt 的定义
以下游情感分类为例,prompt 希望将分类问题转换为预训练的重构 Mask 问题。给定句子[CLS] I like the Disney films very much.[SEP],传统分类方法是得到 CLS 表征后通过 MLP 进行分类。
A prompt involves the following two key concepts ...</div></div></div>
<div class="recent-post-item"><div class="recent-post-info no-cover"><a class="article-title" href="/paper%20notes/CrossFormer/" title="CROSSFORMER: TRANSFORMER UTILIZING CROSSDIMENSION DEPENDENCY FOR MULTIVARIATE TIME SERIES FORECASTING">CROSSFORMER: TRANSFORMER UTILIZING CROSSDIMENSION DEPENDENCY FOR MULTIVARIATE TIME SERIES FORECASTING</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-07-22T16:00:00.000Z" title="发表于 2024-07-23 00:00:00">2024-07-23</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/">时间序列</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/%E9%A2%84%E6%B5%8B/">预测</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/ICLR/">ICLR</a><span class="article-meta-link">•</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/correlation/">correlation</a></span></div><div class="content">Intra-variable and inter-variable attention; uses a router to reduce the complexity of the inter-variable attention</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/iTransformer/" title="ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/iTransformer-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/iTransformer/" title="ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING">ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-07-19T16:00:00.000Z" title="发表于 2024-07-20 00:00:00">2024-07-20</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/">时间序列</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/%E9%A2%84%E6%B5%8B/">预测</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/ICLR/">ICLR</a><span class="article-meta-link">•</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/spotlight/">spotlight</a></span></div><div class="content">Embeds the channels of the time series</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/LoRA/" title="LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/LoRA-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/LoRA/" title="LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS">LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-07-08T16:00:00.000Z" title="发表于 2024-07-09 00:00:00">2024-07-09</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/big-model/">big-model</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/big-model/fine-tune/">fine-tune</a></span></div><div class="content">Replaces full fine-tuning by fine-tuning externally attached low-rank matrices</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/ACmix/" title="On the Integration of Self-Attention and Convolution"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/ACmix-4.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="On the Integration of Self-Attention and Convolution"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/ACmix/" title="On the Integration of Self-Attention and Convolution">On the Integration of Self-Attention and Convolution</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-07-07T16:00:00.000Z" title="发表于 2024-07-08 00:00:00">2024-07-08</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/CNN-attention/">CNN+attention</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/CVPR/">CVPR</a></span></div><div class="content">A new perspective on fusing CNNs and self-attention</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/MOIRAI/" title="Unified Training of Universal Time Series Forecasting Transformers"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/MOIRAI-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="Unified Training of Universal Time Series Forecasting Transformers"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/MOIRAI/" title="Unified Training of Universal Time Series Forecasting Transformers">Unified Training of Universal Time Series Forecasting Transformers</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-07-07T16:00:00.000Z" title="发表于 2024-07-08 00:00:00">2024-07-08</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/">时间序列</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/%E9%A2%84%E6%B5%8B/">预测</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/ICML/">ICML</a><span class="article-meta-link">•</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/Oral/">Oral</a></span></div><div class="content">A universal time-series forecasting model that handles multivariate series via flattening plus variate embeddings; multi-scale patch sizes</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/CrossViT/" title="CrossViT: Cross-Attention Multi-Scale Vision Transformer for Image Classification"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/CrossViT-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="CrossViT: Cross-Attention Multi-Scale Vision Transformer for Image Classification"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/CrossViT/" title="CrossViT: Cross-Attention Multi-Scale Vision Transformer for Image Classification">CrossViT: Cross-Attention Multi-Scale Vision Transformer for Image Classification</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-06-22T16:00:00.000Z" title="发表于 2024-06-23 00:00:00">2024-06-23</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/CV/">CV</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/ICCV/">ICCV</a></span></div><div class="content">A dual-branch multi-scale vision transformer; proposes a cross-attention-based token fusion scheme</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/MLA/" title="MULTIMODAL REPRESENTATION LEARNING BY ALTERNATING UNIMODAL ADAPTATION"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/MLA-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="MULTIMODAL REPRESENTATION LEARNING BY ALTERNATING UNIMODAL ADAPTATION"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/MLA/" title="MULTIMODAL REPRESENTATION LEARNING BY ALTERNATING UNIMODAL ADAPTATION">MULTIMODAL REPRESENTATION LEARNING BY ALTERNATING UNIMODAL ADAPTATION</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2024-06-21T16:00:00.000Z" title="发表于 2024-06-22 00:00:00">2024-06-22</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a><i class="fas fa-angle-right article-meta-link"></i><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/%E5%A4%9A%E6%A8%A1%E6%80%81/">多模态</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/CVPR/">CVPR</a></span></div><div class="content">Alternately optimizes the different modalities; avoids modality forgetting via orthogonal weight modification.</div></div></div>
<div class="recent-post-item"><div class="post_cover right"><a href="/paper%20notes/Autoformer/" title="Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting"><img class="post-bg" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/img/Autoformer-1.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting"></a></div><div class="recent-post-info"><a class="article-title" href="/paper%20notes/Autoformer/" title="Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting">Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting</a><div class="article-meta-wrap"><span class="post-meta-date"><i class="far fa-calendar-alt"></i><span class="article-meta-label">发表于</span><time datetime="2023-07-05T16:00:00.000Z" title="发表于 2023-07-06 00:00:00">2023-07-06</time></span><span class="article-meta"><span class="article-meta-separator">|</span><i class="fas fa-inbox"></i><a class="article-meta__categories" href="/categories/paper/">paper</a></span><span class="article-meta tags"><span class="article-meta-separator">|</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/NeurIPS/">NeurIPS</a><span class="article-meta-link">•</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/">时间序列</a><span class="article-meta-link">•</span><i class="fas fa-tag"></i><a class="article-meta__tags" href="/tags/%E9%A2%84%E6%B5%8B/">预测</a></span></div><div class="content">Autoformer. Adds a decomposition block to extract the intrinsic, complex temporal trends hidden in the model's internal states. Proposes an Auto-Correlation mechanism to replace self-attention: by measuring similarity between sub-series it captures trends better, keeps O(L ln L) complexity, and avoids information loss, making it both fast and accurate. Upgrades point-to-point attention to attention between sub-series.</div></div></div>
<nav id="pagination"><div class="pagination"><span class="page-number current">1</span></div></nav></div><div class="aside-content" id="aside-content"><div class="card-widget card-info"><div class="is-center"><div class="avatar-img"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" data-lazy-src="/icon/avatar-compress.jpg" onerror="this.onerror=null;this.src='/img/friend_404.gif'" alt="avatar"/></div><div class="author-info__name">cloni</div><div class="author-info__description"></div></div><div class="card-info-data site-data is-center"><a href="/archives/"><div class="headline">文章</div><div class="length-num">9</div></a><a href="/tags/"><div class="headline">标签</div><div class="length-num">10</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">12</div></a></div></div><div class="sticky_layout"><div class="card-widget card-recent-post"><div class="item-headline"><i class="fas fa-history"></i><span>最新文章</span></div><div class="aside-list"><div class="aside-list-item no-cover"><div class="content"><a
class="title" href="/paper%20notes/Prompt/" title="Prompt Tuning">Prompt Tuning</a><time datetime="2024-11-05T16:00:00.000Z" title="发表于 2024-11-06 00:00:00">2024-11-06</time></div></div><div class="aside-list-item no-cover"><div class="content"><a class="title" href="/paper%20notes/CrossFormer/" title="CROSSFORMER: TRANSFORMER UTILIZING CROSSDIMENSION DEPENDENCY FOR MULTIVARIATE TIME SERIES FORECASTING">CROSSFORMER: TRANSFORMER UTILIZING CROSSDIMENSION DEPENDENCY FOR MULTIVARIATE TIME SERIES FORECASTING</a><time datetime="2024-07-22T16:00:00.000Z" title="发表于 2024-07-23 00:00:00">2024-07-23</time></div></div><div class="aside-list-item no-cover"><div class="content"><a class="title" href="/paper%20notes/iTransformer/" title="ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING">ITRANSFORMER: INVERTED TRANSFORMERS ARE EFFECTIVE FOR TIME SERIES FORECASTING</a><time datetime="2024-07-19T16:00:00.000Z" title="发表于 2024-07-20 00:00:00">2024-07-20</time></div></div><div class="aside-list-item no-cover"><div class="content"><a class="title" href="/paper%20notes/LoRA/" title="LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS">LoRA: LOW-RANK ADAPTATION OF LARGE LANGUAGE MODELS</a><time datetime="2024-07-08T16:00:00.000Z" title="发表于 2024-07-09 00:00:00">2024-07-09</time></div></div><div class="aside-list-item no-cover"><div class="content"><a class="title" href="/paper%20notes/ACmix/" title="On the Integration of Self-Attention and Convolution">On the Integration of Self-Attention and Convolution</a><time datetime="2024-07-07T16:00:00.000Z" title="发表于 2024-07-08 00:00:00">2024-07-08</time></div></div></div></div><div class="card-widget card-categories"><div class="item-headline">
<i class="fas fa-folder-open"></i>
<span>分类</span>
<a class="card-more-btn" href="/categories/" title="查看更多">
<i class="fas fa-angle-right"></i></a>
</div>
<ul class="card-category-list" id="aside-cat-list">
<li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/"><span class="card-category-list-name">paper</span><span class="card-category-list-count">7</span></a><ul class="card-category-list child"><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/CNN-attention/"><span class="card-category-list-name">CNN+attention</span><span class="card-category-list-count">1</span></a></li><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/CV/"><span class="card-category-list-name">CV</span><span class="card-category-list-count">1</span></a></li><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/big-model/"><span class="card-category-list-name">big-model</span><span class="card-category-list-count">1</span></a><ul class="card-category-list child"><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/big-model/fine-tune/"><span class="card-category-list-name">fine-tune</span><span class="card-category-list-count">1</span></a></li></ul></li><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/%E5%A4%9A%E6%A8%A1%E6%80%81/"><span class="card-category-list-name">多模态</span><span class="card-category-list-count">1</span></a></li><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/"><span class="card-category-list-name">时间序列</span><span class="card-category-list-count">2</span></a><ul class="card-category-list child"><li class="card-category-list-item "><a class="card-category-list-link" href="/categories/paper/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/%E9%A2%84%E6%B5%8B/"><span class="card-category-list-name">预测</span><span class="card-category-list-count">2</span></a></li></ul></li></ul></li>
</ul></div><div class="card-widget card-tags"><div class="item-headline"><i class="fas fa-tags"></i><span>标签</span></div><div class="card-tag-cloud"><a href="/tags/Oral/" style="font-size: 1.1em; color: #999">Oral</a> <a href="/tags/ICCV/" style="font-size: 1.1em; color: #999">ICCV</a> <a href="/tags/%E6%97%B6%E9%97%B4%E5%BA%8F%E5%88%97/" style="font-size: 1.1em; color: #999">时间序列</a> <a href="/tags/ICML/" style="font-size: 1.1em; color: #999">ICML</a> <a href="/tags/CVPR/" style="font-size: 1.5em; color: #99a9bf">CVPR</a> <a href="/tags/NeurIPS/" style="font-size: 1.1em; color: #999">NeurIPS</a> <a href="/tags/correlation/" style="font-size: 1.1em; color: #999">correlation</a> <a href="/tags/ICLR/" style="font-size: 1.5em; color: #99a9bf">ICLR</a> <a href="/tags/%E9%A2%84%E6%B5%8B/" style="font-size: 1.1em; color: #999">预测</a> <a href="/tags/spotlight/" style="font-size: 1.1em; color: #999">spotlight</a></div></div><div class="card-widget card-archives"><div class="item-headline"><i class="fas fa-archive"></i><span>归档</span></div><ul class="card-archive-list"><li class="card-archive-list-item"><a class="card-archive-list-link" href="/archives/2024/11/"><span class="card-archive-list-date">十一月 2024</span><span class="card-archive-list-count">1</span></a></li><li class="card-archive-list-item"><a class="card-archive-list-link" href="/archives/2024/07/"><span class="card-archive-list-date">七月 2024</span><span class="card-archive-list-count">5</span></a></li><li class="card-archive-list-item"><a class="card-archive-list-link" href="/archives/2024/06/"><span class="card-archive-list-date">六月 2024</span><span class="card-archive-list-count">2</span></a></li><li class="card-archive-list-item"><a class="card-archive-list-link" href="/archives/2023/07/"><span class="card-archive-list-date">七月 2023</span><span class="card-archive-list-count">1</span></a></li></ul></div></div></div></main><footer id="footer" style="background: rgb(17, 34, 35)"><div id="footer-wrap"><div class="copyright">©2024 By cloni</div><div class="framework-info"><span>框架 </span><a target="_blank" rel="noopener" href="https://hexo.io">Hexo</a><span class="footer-separator">|</span><span>主题 </span><a target="_blank" rel="noopener" href="https://github.com/jerryc127/hexo-theme-butterfly">Butterfly</a></div></div></footer></div><div id="rightside"><div id="rightside-config-hide"><button id="darkmode" type="button" title="浅色和深色模式转换"><i class="fas fa-adjust"></i></button><button id="hide-aside-btn" type="button" title="单栏和双栏切换"><i class="fas fa-arrows-alt-h"></i></button></div><div id="rightside-config-show"><button id="rightside-config" type="button" title="设置"><i class="fas fa-cog fa-spin"></i></button><button id="go-up" type="button" title="回到顶部"><span class="scroll-percent"></span><i class="fas fa-arrow-up"></i></button></div></div><div><script src="/js/utils.js?v=4.13.0"></script><script src="/js/main.js?v=4.13.0"></script><script src="https://cdn.jsdelivr.net/npm/@fancyapps/ui@5.0.33/dist/fancybox/fancybox.umd.min.js"></script><script src="https://cdn.jsdelivr.net/npm/instant.page@5.2.0/instantpage.min.js" type="module"></script><script src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@17.8.8/dist/lazyload.iife.min.js"></script><div class="js-pjax"></div><div id="local-search"><div class="search-dialog"><nav class="search-nav"><span class="search-dialog-title">搜索</span><span id="loading-status"></span><button class="search-close-button"><i class="fas fa-times"></i></button></nav><div class="is-center" 
id="loading-database"><i class="fas fa-spinner fa-pulse"></i><span> 数据库加载中</span></div><div class="search-wrap"><div id="local-search-input"><div class="local-search-box"><input class="local-search-box--input" placeholder="搜索文章" type="text"/></div></div><hr/><div id="local-search-results"></div><div id="local-search-stats-wrap"></div></div></div><div id="search-mask"></div><script src="/js/search/local-search.js?v=4.13.0"></script></div></div></body></html>