%%% (stray web-page badge text ``Valid HTML 4.0! Valid CSS!'' captured from
%%% the HTML source of the distribution page; not part of the bibliography)
%%% -*-BibTeX-*-
%%% ====================================================================
%%%  BibTeX-file{
%%%     author          = "Nelson H. F. Beebe",
%%%     version         = "1.02",
%%%     date            = "20 August 2024",
%%%     time            = "09:28:24 MDT",
%%%     filename        = "jds.bib",
%%%     address         = "University of Utah
%%%                        Department of Mathematics, 110 LCB
%%%                        155 S 1400 E RM 233
%%%                        Salt Lake City, UT 84112-0090
%%%                        USA",
%%%     telephone       = "+1 801 581 5254",
%%%     URL             = "http://www.math.utah.edu/~beebe",
%%%     checksum        = "28725 484 2238 21449",
%%%     email           = "beebe at math.utah.edu, beebe at acm.org,
%%%                        beebe at computer.org (Internet)",
%%%     codetable       = "ISO/ASCII",
%%%     keywords        = "bibliography; BibTeX: ACM/IMS Journal of Data
%%%                        Science",
%%%     license         = "public domain",
%%%     supported       = "yes",
%%%     docstring       = "This is a COMPLETE bibliography of the
%%%                        ACM/IMS Journal of Data Science (CODEN none,
%%%                        ISSN 2831-3194), a joint journal of the
%%%                        Association for Computing Machinery (ACM) and
%%%                        the Institute of Mathematical Statistics
%%%                        (IMS).  Publication began with volume 1,
%%%                        number 1, in March 2024.
%%%
%%%                        The journal has Web sites at
%%%
%%%                            https://dl.acm.org/journal/jds
%%%                            https://dl.acm.org/loi/jds
%%%                            https://dl.acm.org/toc/jds/2024/1/1
%%%
%%%                        At version 1.02, the COMPLETE year coverage
%%%                        looked like this:
%%%
%%%                             2024 (  12)
%%%
%%%                             Article:         12
%%%
%%%                             Total entries:   12
%%%
%%%                        BibTeX citation tags are uniformly chosen
%%%                        as name:year:abbrev, where name is the
%%%                        family name of the first author or editor,
%%%                        year is a 4-digit number, and abbrev is a
%%%                        3-letter condensation of important title
%%%                        words. Citation tags were automatically
%%%                        generated by software developed for the
%%%                        BibNet Project.
%%%
%%%                        In this bibliography, journal entries are
%%%                        sorted in publication order, using
%%%                        bibsort -byvolume.
%%%
%%%                        The checksum field above contains a CRC-16
%%%                        checksum as the first value, followed by the
%%%                        equivalent of the standard UNIX wc (word
%%%                        count) utility output of lines, words, and
%%%                        characters.  This is produced by Robert
%%%                        Solovay's checksum utility.",
%%%  }
%%% ====================================================================
%%% TeX preamble injected before the bibliography: inputs bibnames.sty
%%% (presumably author-name formatting macros --- confirm against the BibNet
%%% Project distribution) and defines \TM as a superscript small-caps
%%% trademark mark for use inside field values.
@Preamble{"\input bibnames.sty" #
    "\def \TM {${}^{\sc TM}$}"
}

%%% ====================================================================
%%% Acknowledgement abbreviations:
%%% Standard acknowledgement string (postal, telephone, e-mail, and URL
%%% contact data for the bibliographer); referenced by every entry via
%%% ``acknowledgement = ack-nhfb''.
@String{ack-nhfb = "Nelson H. F. Beebe,
                    University of Utah,
                    Department of Mathematics, 110 LCB,
                    155 S 1400 E RM 233,
                    Salt Lake City, UT 84112-0090, USA,
                    Tel: +1 801 581 5254,
                    e-mail: \path|beebe@math.utah.edu|,
                            \path|beebe@acm.org|,
                            \path|beebe@computer.org| (Internet),
                    URL: \path|https://www.math.utah.edu/~beebe/|"}

%%% ====================================================================
%%% Journal abbreviations:
%%% Full journal name; \slash prints a slash that TeX may break a line
%%% after, unlike a literal ``/''.
@String{j-ACM-IMS-J-DATA-SCI = "ACM\slash IMS Journal of Data Science"}

%%% ====================================================================
%%% Bibliography entries, sorted in publication order with ``bibsort
%%% --byvolume'':
%%% v1n1: inaugural-issue editorial by the founding co-editors-in-chief.
%%% Two fixes relative to the scraped record: (1) the title's brace
%%% protection was mis-grouped as ``{{ACM\slash} IMS}''; the proper-noun
%%% group is ``ACM\slash IMS'' as a whole, matching the ajournal/fjournal
%%% strings below.  (2) pages read ``1:1--1:??'', duplicating Gu:2024:BNB
%%% and contradicting this entry's own articleno ``1e''; the pages prefix
%%% now matches the ACM article number.
@Article{Bradic:2024:AIJ,
  author =       "Jelena Bradic and Stratos Idreos and John Lafferty",
  title =        "{ACM\slash IMS} Journal of Data Science: Inaugural
                 Issue Editorial",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "1",
  pages =        "1e:1--1e:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3644102",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Wed Apr 3 11:24:34 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3644102",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "1e",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n1, article 1: batched neural bandit algorithms.
@Article{Gu:2024:BNB,
  author =       "Quanquan Gu and Amin Karbasi and Khashayar Khosravi
                 and Vahab Mirrokni and Dongruo Zhou",
  title =        "Batched Neural Bandits",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "1",
  pages =        "1:1--1:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3592474",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Wed Apr 3 11:24:34 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3592474",
  abstract =     "In many sequential decision-making problems, the
                 individuals are split into several batches and the
                 decision-maker is only allowed to change her policy at
                 the end of batches. These batch problems have a large
                 number of applications, ranging from clinical
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "1",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n1, article 2: record fusion via inference and data augmentation.
@Article{Heidari:2024:RFI,
  author =       "Alireza Heidari and George Michalopoulos and Ihab F.
                 Ilyas and Theodoros Rekatsinas",
  title =        "Record Fusion via Inference and Data Augmentation",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "1",
  pages =        "2:1--2:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3593579",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Wed Apr 3 11:24:34 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3593579",
  abstract =     "We introduce a learning framework for the problem of
                 unifying conflicting data in multiple records referring
                 to the same entity --- we call this problem ``record
                 fusion.'' Record fusion generalizes two known problems:
                 ``data fusion'' and ``golden record.'' Our \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "2",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n1, article 3: DNBP, differentiable nonparametric belief
%%% propagation; the acronym is brace-protected in the title.
@Article{Opipari:2024:DDN,
  author =       "Anthony Opipari and Jana Pavlasek and Chao Chen and
                 Shoutian Wang and Karthik Desingh and Odest Chadwicke
                 Jenkins",
  title =        "{DNBP}: Differentiable Nonparametric Belief
                 Propagation",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "1",
  pages =        "3:1--3:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3592762",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Wed Apr 3 11:24:34 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3592762",
  abstract =     "We present a differentiable approach to learn the
                 probabilistic factors used for inference by a
                 nonparametric belief propagation algorithm. Existing
                 nonparametric belief propagation methods rely on
                 domain-specific features encoded in the probabilistic
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "3",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n1, article 4: data management for ML-based analytics.
@Article{Kang:2024:DMM,
  author =       "Daniel Kang and John Guibas and Peter Bailis and
                 Tatsunori Hashimoto and Yi Sun and Matei Zaharia",
  title =        "Data Management for {ML}-Based Analytics and Beyond",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "1",
  pages =        "4:1--4:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3611093",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Wed Apr 3 11:24:34 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3611093",
  abstract =     "The increasing capabilities of machine learning (ML)
                 has enabled the deployment of ML methods in a variety
                 of applications, ranging from unstructured data
                 analytics to autonomous vehicles. Due to the volumes of
                 data over which ML is deployed, it is \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "4",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n2, article 5: shadow-variable identification theory for data
%%% missing not at random (MNAR).  The abstract field concatenates the
%%% abstract and the publisher's ``Highlights'' blurb, as scraped.
@Article{Miao:2024:ISE,
  author =       "Wang Miao and Lan Liu and Yilin Li and Eric J.
                 Tchetgen Tchetgen and Zhi Geng",
  title =        "Identification and Semiparametric Efficiency Theory of
                 Nonignorable Missing Data with a Shadow Variable",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "2",
  pages =        "5:1--5:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3592389",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3592389",
  abstract =     "We consider identification and estimation with an
                 outcome missing not at random (MNAR). We study an
                 identification strategy based on a so-called shadow
                 variable. A shadow variable is assumed to be correlated
                 with the outcome but independent of the \ldots{}
                 Highlights Problem statement Missingness not at random
                 (MNAR) arises in many empirical studies in biomedical,
                 socioeconomic, and epidemiological researches. A
                 fundamental problem of MNAR is the identification
                 problem, that is, the parameter of interest \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "5",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n2, article 6: optimistic rates for interpolation learning in
%%% linear regression.  Fix relative to the scraped record: the abstract
%%% ended ``\ldots{})'' with a stray closing parenthesis from the HTML
%%% capture; every sibling entry ends the truncated abstract ``\ldots{}''.
@Article{Zhou:2024:ORU,
  author =       "Lijia Zhou and Frederic Koehler and Danica J.
                 Sutherland and Nathan Srebro",
  title =        "Optimistic Rates: a Unifying Theory for Interpolation
                 Learning and Regularization in Linear Regression",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3594234",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3594234",
  abstract =     "We study a localized notion of uniform convergence
                 known as an ``optimistic rate'' [ 34 , 39 ] for linear
                 regression with Gaussian data. Our refined analysis
                 avoids the hidden constant and logarithmic factor in
                 existing results, which are known to be crucial
                 \ldots{} Highlights Problem Statement Generalization
                 theory proposes to explain the ability of machine
                 learning models to generalize to fresh examples by
                 bounding the gap between the test error (error on new
                 examples) and training error (error on the data they
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "6",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n2, article 7: incorporating language-model prompting into weak
%%% supervision.
@Article{Smith:2024:LML,
  author =       "Ryan Smith and Jason A. Fries and Braden Hancock and
                 Stephen H. Bach",
  title =        "Language Models in the Loop: Incorporating Prompting
                 into Weak Supervision",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3617130",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3617130",
  abstract =     "We propose a new strategy for applying large
                 pre-trained language models to novel tasks when labeled
                 training data is limited. Rather than apply the model
                 in a typical zero-shot or few-shot fashion, we treat
                 the model as the basis for labeling functions \ldots{}
                 Highlights Problem statement The goal of this paper is
                 to use large language models to create smaller,
                 specialized models. These specialized models can be
                 better suited to specific tasks because they are tuned
                 for them and are less expensive to serve in \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "7",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n2, article 8: principal component networks.  NOTE(review): the
%%% phrase ``several works have show'' in the abstract below is
%%% reproduced verbatim from the publisher scrape; verify against the
%%% ACM page before ``correcting'' quoted text.
@Article{Waleffe:2024:PCN,
  author =       "Roger Waleffe and Theodoros Rekatsinas",
  title =        "Principal Component Networks: Utilizing Low-Rank
                 Activation Structure to Reduce Parameters Early in
                 Training",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "2",
  pages =        "8:1--8:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3617778",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3617778",
  abstract =     "Recent works show that overparameterized neural
                 networks contain small subnetworks that exhibit
                 comparable accuracy to the full model when trained in
                 isolation. These results highlight the potential to
                 reduce the computational costs of deep neural network
                 \ldots{} Highlights Problem Statement Many recent
                 results show that large neural networks can lead to
                 improved generalization. Yet, training these large
                 models comes with increased computational costs. In an
                 effort to address this issue, several works have show
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "8",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n3, article 9: physics-informed neural operators (PINO) for
%%% learning partial differential equations.
@Article{Li:2024:PIN,
  author =       "Zongyi Li and Hongkai Zheng and Nikola Kovachki and
                 David Jin and Haoxuan Chen and Burigede Liu and Kamyar
                 Azizzadenesheli and Anima Anandkumar",
  title =        "Physics-Informed Neural Operator for Learning Partial
                 Differential Equations",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "3",
  pages =        "9:1--9:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3648506",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3648506",
  abstract =     "In this article, we propose physics-informed neural
                 operators (PINO) that combine training data and physics
                 constraints to learn the solution operator of a given
                 family of parametric Partial Differential Equations
                 (PDE). PINO is the first hybrid approach \ldots{}
                 Highlights PROBLEM STATEMENT Machine learning methods
                 have recently shown promise in solving partial
                 differential equations (PDEs) raised in science and
                 engineering. They can be classified into two broad
                 categories: approximating the solution function
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "9",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n3, article 10: anytime-valid off-policy inference for contextual
%%% bandits.  Fix relative to the scraped record: the title was stored
%%% in mixed lowercase (``Anytime-valid off-policy Inference ...''),
%%% breaking this file's uniform Title Case convention; normalized to
%%% the published form on the ACM landing page for DOI 10.1145/3643693.
@Article{Waudby-Smith:2024:AVP,
  author =       "Ian Waudby-Smith and Lili Wu and Aaditya Ramdas and
                 Nikos Karampatziakis and Paul Mineiro",
  title =        "Anytime-Valid Off-Policy Inference for Contextual
                 Bandits",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "3",
  pages =        "10:1--10:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3643693",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3643693",
  abstract =     "Contextual bandit algorithms are ubiquitous tools for
                 active sequential experimentation in healthcare and the
                 tech industry. They involve online learning algorithms
                 that adaptively learn policies over time to map
                 observed contexts X$_t$ to actions A$_t$ in an \ldots{}
                 Highlights PROBLEM STATEMENT Contextual bandits and
                 adaptive experimentation are becoming increasingly
                 commonplace in the tech industry and health sciences.
                 The problem setting consists of (at each time t )
                 observing a context X$_t$, taking a randomized
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "10",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% v1n3, article 11: the necessity of machine learning theory in
%%% mitigating AI risk; the acronym AI is brace-protected in the title.
@Article{Belkin:2024:NML,
  author =       "Mikhail Belkin",
  title =        "The Necessity of Machine Learning Theory in Mitigating
                 {AI} Risk",
  journal =      j-ACM-IMS-J-DATA-SCI,
  volume =       "1",
  number =       "3",
  pages =        "11:1--11:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3643694",
  ISSN =         "2831-3194",
  ISSN-L =       "2831-3194",
  bibdate =      "Tue Aug 20 09:27:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/jds.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3643694",
  abstract =     "Highlights SUMMARY In the last years we have witnessed
                 rapidly accelerating progress in Neural Network-based
                 Artificial Intelligence. Yet our fundamental
                 understanding of these methods has lagged far behind.
                 Never before had a technology been developed \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM\slash IMS J. Data Sci.",
  articleno =    "11",
  fjournal =     "ACM\slash IMS Journal of Data Science",
  journal-URL =  "https://dl.acm.org/loi/jds",
}

%%% [20-Aug-2024] check incomplete v1n3