% Encoding: UTF-8

@Article{Boykov2001,
  author    = {Yuri Boykov and Olga Veksler and Ramin Zabih},
  title     = {Fast approximate energy minimization via graph cuts},
  journal   = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  year      = {2001},
  volume    = {23},
  number    = {11},
  pages     = {1222--1239},
  publisher = {IEEE},
  language  = {English},
  abstract  = {Many tasks in computer vision involve assigning a label (such as disparity) to every pixel. A common constraint is that the labels should vary smoothly almost everywhere while preserving sharp discontinuities that may exist, e.g., at object boundaries. These tasks are naturally stated in terms of energy minimization. The authors consider a wide class of energies with various smoothness constraints. Global minimization of these energy functions is NP-hard even in the simplest discontinuity-preserving case. Therefore, our focus is on efficient approximation algorithms. We present two algorithms based on graph cuts that efficiently find a local minimum with respect to two types of large moves, namely expansion moves and swap moves. These moves can simultaneously change the labels of arbitrarily large sets of pixels. In contrast, many standard algorithms (including simulated annealing) use small moves where only one pixel changes its label at a time. Our expansion algorithm finds a labeling within a known factor of the global minimum, while our swap algorithm handles more general energy functions. Both of these algorithms allow important cases of discontinuity preserving energies. We experimentally demonstrate the effectiveness of our approach for image restoration, stereo and motion. On real data with ground truth, we achieve 98 percent accuracy.},
  file      = {Boykov2001.pdf:Graph/Boykov2001.pdf:PDF},
  owner     = {喻垚慎},
  timestamp = {2013.07.04}
}

@InProceedings{Lafferty2001,
  author    = {John Lafferty and Andrew McCallum and Fernando Pereira},
  title     = {Conditional Random Fields: Probabilistic Models for Segmenting and Labeling Sequence Data},
  booktitle = {Proceedings of the Eighteenth International Conference on Machine Learning},
  series    = {ICML '01},
  year      = {2001},
  pages     = {282--289},
  publisher = {Morgan Kaufmann Publishers Inc.},
  address   = {San Francisco, CA, USA},
  isbn      = {1-55860-778-1},
  numpages  = {8},
  acmid     = {655813},
  language  = {English},
  abstract  = {We present conditional random fields, a framework for building probabilistic models to segment and label sequence data. Conditional random fields offer several advantages over hidden Markov models and stochastic grammars for such tasks, including the ability to relax strong independence assumptions made in those models. Conditional random fields also avoid a fundamental limitation of maximum entropy Markov models (MEMMs) and other discriminative Markov models based on directed graphical models, which can be biased towards states with few successor states. We present iterative parameter estimation algorithms for conditional random fields and compare the performance of the resulting models to HMMs and MEMMs on synthetic and natural-language data.},
  file      = {Lafferty2001.pdf:Graph/Lafferty2001.pdf:PDF},
  owner     = {喻垚慎},
  timestamp = {2013.06.17}
}