Stop adding papers

Signed-off-by: Riccardo Finotello <riccardo.finotello@gmail.com>
2020-10-10 19:11:29 +02:00
parent 28d302b06b
commit 46bdb747ec
12 changed files with 803 additions and 184 deletions

@@ -243,6 +243,24 @@
number = {1}
}
@article{Anderson:2016:NewConstructionCalabiYau,
title = {A {{New Construction}} of {{Calabi}}-{{Yau Manifolds}}: {{Generalized CICYs}}},
shorttitle = {A {{New Construction}} of {{Calabi}}-{{Yau Manifolds}}},
author = {Anderson, Lara B. and Apruzzi, Fabio and Gao, Xin and Gray, James and Lee, Seung-Joo},
date = {2016},
journaltitle = {Nuclear Physics B},
shortjournal = {Nuclear Physics B},
volume = {906},
pages = {441--496},
issn = {0550-3213},
doi = {10.1016/j.nuclphysb.2016.03.016},
abstract = {We present a generalization of the complete intersection in products of projective space (CICY) construction of Calabi-Yau manifolds. CICY three-folds and four-folds have been studied extensively in the physics literature. Their utility stems from the fact that they can be simply described in terms of a `configuration matrix', a matrix of integers from which many of the details of the geometries can be easily extracted. The generalization we present is to allow negative integers in the configuration matrices which were previously taken to have positive semi-definite entries. This broadening of the complete intersection construction leads to a larger class of Calabi-Yau manifolds than that considered in previous work, which nevertheless enjoys much of the same degree of calculational control. These new Calabi-Yau manifolds are complete intersections in (not necessarily Fano) ambient spaces with an effective anticanonical class. We find examples with topology distinct from any that has appeared in the literature to date. The new manifolds thus obtained have many interesting features. For example, they can have smaller Hodge numbers than ordinary CICYs and lead to many examples with elliptic and K3-fibration structures relevant to F-theory and string dualities.},
archivePrefix = {arXiv},
eprint = {1507.03235},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/anderson_et_al_2016_a_new_construction_of_calabi-yau_manifolds2.pdf;/home/riccardo/.local/share/zotero/storage/GWD2QTX5/1507.html}
}
@article{Anderson:2017:FibrationsCICYThreefolds,
title = {Fibrations in {{CICY}} Threefolds},
author = {Anderson, Lara B. and Gao, Xin and Gray, James and Lee, Seung-Joo},
@@ -303,6 +321,21 @@
file = {/home/riccardo/.local/share/zotero/files/angelantonj_sagnotti_2002_open_strings.pdf}
}
@online{Ardizzone:2019:AnalyzingInverseProblems,
title = {Analyzing {{Inverse Problems}} with {{Invertible Neural Networks}}},
author = {Ardizzone, Lynton and Kruse, Jakob and Wirkert, Sebastian and Rahner, Daniel and Pellegrini, Eric W. and Klessen, Ralf S. and Maier-Hein, Lena and Rother, Carsten and Köthe, Ullrich},
date = {2019-02-06},
url = {http://arxiv.org/abs/1808.04730},
urldate = {2020-10-10},
abstract = {In many tasks, in particular in natural science, the goal is to determine hidden system parameters from a set of measurements. Often, the forward process from parameter- to measurement-space is a well-defined function, whereas the inverse problem is ambiguous: one measurement may map to multiple different sets of parameters. In this setting, the posterior parameter distribution, conditioned on an input measurement, has to be determined. We argue that a particular class of neural networks is well suited for this task -- so-called Invertible Neural Networks (INNs). Although INNs are not new, they have, so far, received little attention in the literature. While classical neural networks attempt to solve the ambiguous inverse problem directly, INNs are able to learn it jointly with the well-defined forward process, using additional latent output variables to capture the information otherwise lost. Given a specific measurement and sampled latent variables, the inverse pass of the INN provides a full distribution over parameter space. We verify experimentally, on artificial data and real-world problems from astrophysics and medicine, that INNs are a powerful analysis tool to find multi-modalities in parameter space, to uncover parameter correlations, and to identify unrecoverable parameters.},
archivePrefix = {arXiv},
eprint = {1808.04730},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/ardizzone_et_al_2019_analyzing_inverse_problems_with_invertible_neural_networks.pdf;/home/riccardo/.local/share/zotero/storage/NQJPI658/1808.html},
keywords = {⛔ No DOI found},
primaryClass = {cs, stat}
}
@online{Arduino:2020:OriginDivergencesTimeDependent,
title = {On the {{Origin}} of {{Divergences}} in {{Time}}-{{Dependent Orbifolds}}},
author = {Arduino, Andrea and Finotello, Riccardo and Pesando, Igor},
@@ -1144,7 +1177,6 @@
journaltitle = {Machine Learning},
volume = {20},
pages = {273--297},
publisher = {{Springer}},
file = {/home/riccardo/.local/share/zotero/files/cortes_vapnik_1995_support-vector_networks.pdf},
keywords = {❓ Multiple DOI},
number = {3}
@@ -2060,6 +2092,28 @@
number = {1}
}
@incollection{Goodfellow:2014:GenerativeAdversarialNets,
title = {Generative Adversarial Nets},
booktitle = {Advances in Neural Information Processing Systems 27},
author = {Goodfellow, Ian and Pouget-Abadie, Jean and Mirza, Mehdi and Xu, Bing and Warde-Farley, David and Ozair, Sherjil and Courville, Aaron and Bengio, Yoshua},
editor = {Ghahramani, Z. and Welling, M. and Cortes, C. and Lawrence, N. D. and Weinberger, K. Q.},
date = {2014},
pages = {2672--2680},
publisher = {{Curran Associates, Inc.}},
url = {http://papers.nips.cc/paper/5423-generative-adversarial-nets.pdf}
}
@inproceedings{Gori:2005:NewModelLearning,
title = {A New Model for Learning in Graph Domains},
booktitle = {Proceedings of the 2005 {{IEEE}} International Joint Conference on Neural Networks},
author = {Gori, Marco and Monfardini, Gabriele and Scarselli, Franco},
date = {2005},
volume = {2},
pages = {729--734},
doi = {10.1109/IJCNN.2005.1555942},
organization = {{IEEE}}
}
@article{Grana:2006:FluxCompactificationsString,
ids = {Grana:2005:FluxCompactificationsString},
title = {Flux Compactifications in String Theory: {{A}} Comprehensive Review},
@@ -2092,6 +2146,42 @@
series = {{{SpringerBriefs}} in {{Physics}}}
}
@article{Gray:2013:AllCompleteIntersection,
title = {All {{Complete Intersection Calabi}}-{{Yau Four}}-{{Folds}}},
author = {Gray, James and Haupt, Alexander S. and Lukas, Andre},
date = {2013-07},
journaltitle = {Journal of High Energy Physics},
shortjournal = {J. High Energ. Phys.},
volume = {2013},
pages = {70},
issn = {1029-8479},
doi = {10.1007/JHEP07(2013)070},
abstract = {We present an exhaustive, constructive classification of the Calabi-Yau four-folds which can be described as complete intersections in products of projective spaces. A comprehensive list of 921,497 configuration matrices which represent all topologically distinct types of complete intersection Calabi-Yau four-folds is provided and can be downloaded at http://www-thphys.physics.ox.ac.uk/projects/CalabiYau/Cicy4folds/index.html . The manifolds have non-negative Euler characteristics in the range 0 to 2610. This data set will be of use in a wide range of physical and mathematical applications. Nearly all of these four-folds are elliptically fibered and are thus of interest for F-theory model building.},
archivePrefix = {arXiv},
eprint = {1303.1832},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/gray_et_al_2013_all_complete_intersection_calabi-yau_four-folds2.pdf;/home/riccardo/.local/share/zotero/storage/B4K3HHPX/1303.html},
number = {7}
}
@article{Gray:2014:TopologicalInvariantsFibration,
title = {Topological {{Invariants}} and {{Fibration Structure}} of {{Complete Intersection Calabi}}-{{Yau Four}}-{{Folds}}},
author = {Gray, James and Haupt, Alexander S. and Lukas, Andre},
date = {2014-09},
journaltitle = {Journal of High Energy Physics},
shortjournal = {J. High Energ. Phys.},
volume = {2014},
pages = {93},
issn = {1029-8479},
doi = {10.1007/JHEP09(2014)093},
abstract = {We investigate the mathematical properties of the class of Calabi-Yau four-folds recently found in [arXiv:1303.1832]. This class consists of 921,497 configuration matrices which correspond to manifolds that are described as complete intersections in products of projective spaces. For each manifold in the list, we compute the full Hodge diamond as well as additional topological invariants such as Chern classes and intersection numbers. Using this data, we conclude that there are at least 36,779 topologically distinct manifolds in our list. We also study the fibration structure of these manifolds and find that 99.95 percent can be described as elliptic fibrations. In total, we find 50,114,908 elliptic fibrations, demonstrating the multitude of ways in which many manifolds are fibered. A sub-class of 26,088,498 fibrations satisfy necessary conditions for admitting sections. The complete data set can be downloaded at http://www-thphys.physics.ox.ac.uk/projects/CalabiYau/Cicy4folds/index.html .},
archivePrefix = {arXiv},
eprint = {1405.2073},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/gray_et_al_2014_topological_invariants_and_fibration_structure_of_complete_intersection2.pdf;/home/riccardo/.local/share/zotero/storage/GWDFUFYW/1405.html},
number = {9}
}
@article{Green:1987:CalabiYauManifoldsComplete,
title = {Calabi-{{Yau}} Manifolds as Complete Intersections in Products of Complex Projective Spaces},
author = {Green, Paul and Hübsch, Tristan},
@@ -2661,6 +2751,35 @@
number = {8}
}
@online{Kingma:2014:AutoEncodingVariationalBayes,
title = {Auto-{{Encoding Variational Bayes}}},
author = {Kingma, Diederik P. and Welling, Max},
date = {2014-05-01},
url = {http://arxiv.org/abs/1312.6114},
urldate = {2020-10-10},
abstract = {How can we perform efficient inference and learning in directed probabilistic models, in the presence of continuous latent variables with intractable posterior distributions, and large datasets? We introduce a stochastic variational inference and learning algorithm that scales to large datasets and, under some mild differentiability conditions, even works in the intractable case. Our contributions are two-fold. First, we show that a reparameterization of the variational lower bound yields a lower bound estimator that can be straightforwardly optimized using standard stochastic gradient methods. Second, we show that for i.i.d. datasets with continuous latent variables per datapoint, posterior inference can be made especially efficient by fitting an approximate inference model (also called a recognition model) to the intractable posterior using the proposed lower bound estimator. Theoretical advantages are reflected in experimental results.},
archivePrefix = {arXiv},
eprint = {1312.6114},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/kingma_welling_2014_auto-encoding_variational_bayes2.pdf;/home/riccardo/.local/share/zotero/storage/KYP8BISG/1312.html},
keywords = {⛔ No DOI found},
primaryClass = {cs, stat}
}
@online{Kingma:2017:AdamMethodStochastic,
title = {Adam: {{A Method}} for {{Stochastic Optimization}}},
shorttitle = {Adam},
author = {Kingma, Diederik P. and Ba, Jimmy},
date = {2017},
abstract = {We introduce Adam, an algorithm for first-order gradient-based optimization of stochastic objective functions, based on adaptive estimates of lower-order moments. The method is straightforward to implement, is computationally efficient, has low memory requirements, is invariant to diagonal rescaling of the gradients, and is well suited for problems that are large in terms of data and/or parameters. The method is also appropriate for non-stationary objectives and problems with very noisy and/or sparse gradients. The hyper-parameters have intuitive interpretations and typically require little tuning. Some connections to related algorithms, which inspired Adam, are discussed. We also analyze the theoretical convergence properties of the algorithm and provide a regret bound on the convergence rate that is comparable to the best known results under the online convex optimization framework. Empirical results demonstrate that Adam works well in practice and compares favorably to other stochastic optimization methods. Finally, we discuss AdaMax, a variant of Adam based on the infinity norm.},
archivePrefix = {arXiv},
eprint = {1412.6980},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/kingma_ba_2017_adam3.pdf;/home/riccardo/.local/share/zotero/storage/9JQ8YQL7/1412.html},
keywords = {⛔ No DOI found},
primaryClass = {cs}
}
@online{Kingma:2017:AdamMethodStochastica,
ids = {Kingma:2017:AdamMethodStochastic},
title = {Adam: {{A Method}} for {{Stochastic Optimization}}},
@@ -2968,6 +3087,15 @@
series = {Lecture {{Notes}} in {{Computer Science}}}
}
@inproceedings{Monti:2017:GeometricDeepLearning,
title = {Geometric Deep Learning on Graphs and Manifolds Using Mixture Model {{CNNs}}},
booktitle = {Proceedings of the {{IEEE}} Conference on Computer Vision and Pattern Recognition},
author = {Monti, Federico and Boscaini, Davide and Masci, Jonathan and Rodolà, Emanuele and Svoboda, Jan and Bronstein, Michael M.},
date = {2017},
pages = {5115--5124},
keywords = {⛔ No DOI found}
}
@article{Mutter:2019:DeepLearningHeterotic,
title = {Deep Learning in the Heterotic Orbifold Landscape},
author = {Mütter, Andreas and Parr, Erik and Vaudrevange, Patrick K. S.},
@@ -3367,6 +3495,21 @@
number = {1}
}
@online{Rezende:2014:StochasticBackpropagationApproximate,
title = {Stochastic {{Backpropagation}} and {{Approximate Inference}} in {{Deep Generative Models}}},
author = {Rezende, Danilo Jimenez and Mohamed, Shakir and Wierstra, Daan},
date = {2014-05-30},
url = {http://arxiv.org/abs/1401.4082},
urldate = {2020-10-10},
abstract = {We marry ideas from deep neural networks and approximate Bayesian inference to derive a generalised class of deep, directed generative models, endowed with a new algorithm for scalable inference and learning. Our algorithm introduces a recognition model to represent approximate posterior distributions, which acts as a stochastic encoder of the data. We develop stochastic back-propagation -- rules for back-propagation through stochastic variables -- and use this to develop an algorithm that allows for joint optimisation of the parameters of both the generative and recognition model. We demonstrate on several real-world data sets that the model generates realistic samples, provides accurate imputations of missing data and is a useful tool for high-dimensional data visualisation.},
archivePrefix = {arXiv},
eprint = {1401.4082},
eprinttype = {arxiv},
file = {/home/riccardo/.local/share/zotero/files/rezende_et_al_2014_stochastic_backpropagation_and_approximate_inference_in_deep_generative_models2.pdf;/home/riccardo/.local/share/zotero/storage/HKC6H5VK/1401.html},
keywords = {⛔ No DOI found},
primaryClass = {cs, stat}
}
@article{Rudolph:1994:ConvergenceAnalysisCanonical,
title = {Convergence Analysis of Canonical Genetic Algorithms},
author = {Rudolph, Günter},
@@ -3424,6 +3567,25 @@
number = {6088}
}
@inproceedings{Salimans:2015:MarkovChainMonte,
title = {Markov Chain Monte Carlo and Variational Inference: {{Bridging}} the Gap},
booktitle = {International Conference on Machine Learning},
author = {Salimans, Tim and Kingma, Diederik and Welling, Max},
date = {2015},
pages = {1218--1226},
keywords = {⛔ No DOI found}
}
@inproceedings{Scarselli:2004:GraphicalbasedLearningEnvironments,
title = {Graphical-Based Learning Environments for Pattern Recognition},
booktitle = {Joint {{IAPR}} International Workshops on Statistical Techniques in Pattern Recognition ({{SPR}}) and Structural and Syntactic Pattern Recognition ({{SSPR}})},
author = {Scarselli, Franco and Tsoi, Ah Chung and Gori, Marco and Hagenbuchner, Markus},
date = {2004},
pages = {42--56},
keywords = {⛔ No DOI found},
organization = {{Springer}}
}
@online{Schellekens:2017:BigNumbersString,
title = {Big {{Numbers}} in {{String Theory}}},
author = {Schellekens, A. N.},
@@ -3881,6 +4043,15 @@
file = {/home/riccardo/.local/share/zotero/files/zheng_casari_2018_feature_engineering_for_machine_learning.pdf}
}
@inproceedings{Zhu:2017:UnpairedImagetoimageTranslation,
title = {Unpaired Image-to-Image Translation Using Cycle-Consistent Adversarial Networks},
booktitle = {Proceedings of the {{IEEE}} International Conference on Computer Vision},
author = {Zhu, Jun-Yan and Park, Taesung and Isola, Phillip and Efros, Alexei A.},
date = {2017},
pages = {2223--2232},
keywords = {⛔ No DOI found}
}
@book{Zwiebach::FirstCourseString,
title = {A {{First Course}} in {{String Theory}}},
author = {Zwiebach, Barton},