OXFORD UNIVERSITY COMPUTING LABORATORY

BibTeX entries

@unpublished{adt,
  author   = {Jeremy Gibbons},
  title    = {Unfolding Abstract Datatypes},
  year     = {2008},
  month    = jan,
  note     = {Submitted for publication},
  url      = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/adt.pdf},
  abstract = {One of the most fundamental tools in the programmer's toolbox is the abstract datatype. However, abstract datatypes are not widely used in functional programming, perhaps because they are not subject to familiar proof methods such as equational reasoning and induction~--- in essence, because they are a form of codata rather than a form of data. We show that proof methods for corecursive programs are the appropriate techniques to use. In particular, building on established work on final coalgebra semantics for object-oriented programs, we show that the reasoning principles of unfold operators are perfectly suited for the task. We illustrate with solutions to two recent problems in the literature.}
}
@INPROCEEDINGS{wsrfmodeling,
  author = {Tianyi Zang and Radu Calinescu and Steve Harris and Andrew Tsui and Marta Kwiatkowska and Jeremy Gibbons and Jim Davies and Peter Maccallum and Carlos Caldas},
  title = {{WSRF}-Based Modeling of Clinical Trial Information for Collaborative Cancer Research},
  booktitle = {8th IEEE International Symposium on Cluster Computing and the Grid (CCGrid08)},
  note = {To appear},
  year = 2008,
  month = MAY,
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/wsrfmodeling.pdf},
  abstract = {The CancerGrid consortium is developing open-standards cancer informatics to address the challenges posed by the modern cancer clinical trials. This paper presents a service-oriented software paradigm implemented in CancerGrid to derive clinical trial information management systems for collaborative cancer research across multiple institutions. Our proposal is founded on a combination of a clinical trial (meta)model and WSRF (Web Services Resource Framework), and is currently being evaluated for use in early phase trials. Although primarily targeted at cancer research, our approach is readily applicable to other areas for which a similar information model is available.}
}
@UNPUBLISHED{iterator,
  author = {Jeremy Gibbons and Oliveira, Bruno C. d. S.},
  title = {The Essence of the {I}terator Pattern},
  note = {Submitted for publication. Revised (December 2007) version of \cite{iterator-msfp}},
  year = {2007},
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/iterator.pdf},
  abstract = {The \textsc{Iterator} pattern gives a clean interface for element-by-element access to a collection.  Imperative iterations using the pattern have two simultaneous aspects: \emph{mapping} and \emph{accumulating}.  Various existing functional iterations model one or other of these, but not both simultaneously.  We argue that McBride and Paterson's \emph{applicative functors}, and in particular the corresponding \emph{traverse} operator, do exactly this, and therefore capture the essence of the \textsc{Iterator} pattern.  We present some axioms for traversal, and illustrate with a simple example, the \emph{wordcount} problem.}
}
@inproceedings{icegov,
  author       = {Charles Crichton and Jim Davies and Jeremy Gibbons and Steve Harris and Aadya Shukla},
  title        = {Semantics Frameworks for e-Government},
  booktitle    = {First International Conference on Theory and Practice of Electronic Governance},
  editor       = {Tomasz Janowski and Theresa Pardo},
  organization = {International Institute for Software Technology, United Nations University},
  address      = {Macau},
  year         = {2007},
  month        = dec,
  url          = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/icegov.pdf},
  abstract     = {This paper explains how semantic frameworks can be used to support successful e-Government initiatives by connecting system design to a shared understanding of interactions and processes.  It shows how metadata standards and repositories can be used to establish and maintain such an understanding, and how they can be used in the automatic generation and instantiation of components and services. It includes an account of a successful implementation at an international level, and a brief review of related approaches.}
}
@unpublished{cufp2007,
  author   = {Jeremy Gibbons},
  title    = {Report on {F}ourth {W}orkshop on ``{C}ommercial {U}sers of {F}unctional {P}rogramming''},
  year     = {2007},
  month    = dec,
  note     = {Oxford University Computing Laboratory},
  url      = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/cufp2007.pdf},
  abstract = {The goal of the \textit{Commercial Users of Functional Programming} series of workshops is ``to build a community for users of functional programming languages and technology''. The fourth workshop in the series took place in Freiburg, Germany on 4th October 2007, colocated as usual with the \textit{International Conference on Functional Programming}. The workshop is flourishing, having grown from an intimate gathering of 25 people in Snowbird in 2004, through 40 in Tallinn in 2005 and 57 in Portland in 2006, to 104 registered participants (and more than a handful of casual observers) this time. \par For the first time this year, the organisers had the encouraging dilemma of receiving more offers of presentations than would fit in the available time. The eventual schedule included an invited talk by Xavier Leroy, eleven contributed presentations, and an energetic concluding discussion. Brief summaries of each appear below.}
}
@unpublished{parametric,
  author   = {Jeremy Gibbons and Ross Paterson},
  title    = {Parametric Datatype-Genericity},
  year     = {2007},
  month    = jul,
  note     = {Submitted for publication},
  url      = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/parametric.pdf},
  abstract = {Datatype-generic programs are programs that are parametrized by a \emph{datatype} or \emph{type functor}. There are two main styles of datatype-generic programming: the \emph{Algebra of Programming} approach, characterized by structured recursion operators parametrized by a shape functor, and the \emph{Generic Haskell} approach, characterized by case analysis over the structure of a datatype. We show that the former enjoys a kind of parametricity, relating the behaviours of generic functions at different types; in contrast, the latter is more ad~hoc, with no coherence required or provided between the various clauses of a definition.}
}
@UNPUBLISHED{bpmn-csp,
  author = {Peter Y. H. Wong and Jeremy Gibbons},
  title = {A Process Semantics for {BPMN}},
  note = {Draft},
  year = {2007},
  url = {http://www.comlab.ox.ac.uk/peter.wong/pub/bpmnsem.pdf},
  abstract = {Business Process Modelling Notation (BPMN), developed by the Business Process Management Initiative (BPMI), intends to bridge the gap between business process design and implementation. However, the specification of the notation does not include a formal semantics. This paper shows how a subset of the BPMN can be given a process semantics in Communicating Sequential Processes. Such a semantics allows developers to formally analyse and compare BPMN diagrams. A realistic example of a complex business process is included. }
}
@inproceedings{cgmda,
  author    = {Radu Calinescu and Steve Harris and Jeremy Gibbons and Jim Davies and Igor Toujilov and Sylvia Nagl},
  title     = {Model-Driven Architecture for Cancer Research},
  booktitle = {Software Engineering and Formal Methods},
  year      = 2007,
  month     = sep,
  url       = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/cgmda.pdf},
  abstract  = {It is a common phenomenon for research projects to collect and analyse valuable data using ad-hoc information systems. These costly-to-build systems are often composed of incompatible variants of the same modules, and record data in ways that prevent any meaningful result analysis across similar projects. We present a framework that uses a combination of formal methods, model-driven development and service-oriented architecture (SOA) technologies to automate the generation of data management systems for cancer clinical trial research, an area particularly affected by these problems. The SOA solution generated by the framework is based on an information model of a cancer clinical trial, and comprises components for both the collection and analysis of cancer research data, within and across clinical trial boundaries. While primarily targeted at cancer research, our approach is readily applicable to other areas for which a similar information model is available.}
}
@INPROCEEDINGS{uto,
  author = {Michael Anthony Smith and Jeremy Gibbons},
  title = {Unifying Theories of Objects},
  crossref = {ifm2007},
  pages = {599--618},
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/uto.pdf},
  abstract = {We present an approach to modelling Abadi--Cardelli-style object calculi as UTP \emph{designs}. Here we provide a core object calculus with an operational \emph{small-step evaluation rule} semantics, and a corresponding UTP model with a denotational \emph{relational predicate} semantics. For clarity, the UTP model is defined in terms of an operand stack, which is used to store the results of subprograms.  Models of a less operational nature are briefly discussed. The consistency of the UTP model is demonstrated by a structural induction proof over the operations of the core object calculus. Overall, our UTP model is intended to provide facilities for encoding both object-based and class-based languages.}
}
@proceedings{ifm2007,
  title     = {Integrated Formal Methods},
  booktitle = {Integrated Formal Methods},
  editor    = {Jim Davies and Jeremy Gibbons},
  series    = {Lecture Notes in Computer Science},
  volume    = {4591},
  publisher = {Springer-Verlag},
  year      = {2007},
  image     = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/ifm2007.jpg},
  url       = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/ifm2007-preface.pdf},
  abstract  = {The design and analysis of computing systems presents a significant challenge:
systems need to be understood at many different levels of abstraction, and examined
from many different perspectives. Formal methods---languages, tools,
and techniques with a sound, mathematical basis---can be used to develop a
thorough understanding, and to support rigorous examination.
\par
Further research into effective integration is required if these methods are
to have a significant impact outside academia. The Integrated Formal Methods
(IFM) series of conferences seeks to promote that research, to bring together
the researchers carrying it out, and to disseminate the results of that research
among the wider academic and industrial community.
\par
Earlier meetings in the series were held at: York (1999); Dagstuhl (2000);
Turku (2002); Kent (2004); Eindhoven (2005). IFM 2007 is the largest to date,
with 32 technical papers (from 85 submissions), 3 invited talks, 3 workshops,
and a tutorial. The success of the series reflects the enthusiasm and efforts of
the IFM community, and the organisers would like to thank the speakers, the
committee, and the reviewers for their contributions.}
}
@inproceedings{dgp,
  author   = {Jeremy Gibbons},
  title    = {Datatype-Generic Programming},
  crossref = {ssdgp},
  url      = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/dgp.pdf},
  abstract = {\emph{Generic programming} aims to increase the flexibility of
      programming languages, by expanding the possibilities for
      parametrization~--- ideally, without also expanding the possibilities
      for uncaught errors. The term means different things to different
      people: \emph{parametric polymorphism}, \emph{data abstraction},
      \emph{meta-programming}, and so on. We use it to mean polytypism,
      that is, parametrization by the \emph{shape} of data structures
      rather than their contents. To avoid confusion with other uses, we
      have coined the qualified term \emph{datatype-generic programming}
      for this purpose.  In these lecture notes, we expand on the
      definition of datatype-generic programming, and present some examples
      of datatype-generic programs. We also explore the connection with
      \emph{design patterns} in object-oriented programming; in particular,
      we argue that certain design patterns are just higher-order
      datatype-generic programs.}
}
@proceedings{ssdgp,
  title     = {Spring School on Datatype-Generic Programming},
  booktitle = {Spring School on Datatype-Generic Programming},
  editor    = {Roland Backhouse and Jeremy Gibbons and Ralf Hinze and Johan Jeuring},
  series    = {Lecture Notes in Computer Science},
  volume    = {4719},
  publisher = {Springer-Verlag},
  year      = {2007},
  image     = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/ssdgp.jpg},
  doi       = {10.1007/978-3-540-76786-2},
  url       = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/ssdgp-preface.pdf},
  abstract  = {A leitmotif in the evolution of programming paradigms has been the level and extent of parametrisation that is facilitated~--- the so-called \emph{genericity} of the paradigm. The sorts of parameters that can be envisaged in a programming language range from simple values, like integers and floating-point numbers,through structured values, types and classes, to kinds (the type of types and/or classes). \emph{Datatype-generic programming} is about parametrising programs by the structure of the data that they manipulate, exploiting that structure when it is relevant and ignoring it when it is not. Programming languages most commonly used at the present time do not provide effective mechanisms for documenting and implementing datatype genericity. This volume is a contribution towards improving the state of the art.}
}
@INPROCEEDINGS{gip,
  author = {Jeremy Gibbons and Meng Wang and Oliveira, Bruno C. d. S.},
  title = {Generic and Indexed Programming},
  booktitle = {Trends in Functional Programming},
  year = {2007},
  editor = {Marco Morazan},
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/gip.pdf},
  abstract = {The EPSRC-funded \emph{Generic and Indexed Programming} project will explore the interaction between \emph{datatype-generic programming} (DGP) ---~programs parametrized by the shape of their data~--- and \emph{indexed programming} (IP) ---~lightweight dependently-typed programming, with programs indexed by type-level representations of properties. Integrating these two notions will provide new ways for programmers to \emph{capture abstractions}.}
}
@INPROCEEDINGS{compatibility,
  author = {Peter Y. H. Wong and Jeremy Gibbons},
  title = {Verifying Business Process Compatibility},
  booktitle = {3rd International Workshop on Methods and Tools for Coordinating Concurrent, Distributed and Mobile Systems (MTCoord'07)},
  year = {2007},
  address = {Paphos, Cyprus},
  month = JUN,
  note = {A shorter version of this paper was presented at the 2nd European Young Researchers Workshop on Service Oriented Computing, Leicester, United Kingdom, June 2007},
  abstract = {Business Process Modelling Notation (BPMN), developed by the Business Process Management Initiative (BPMI), intends to bridge the gap between business process design and implementation. In this paper we describe a process-algebraic approach to verifying process interactions for business collaboration described in BPMN. We first describe briefly a process semantics for BPMN in Communicating Sequential Processes; we then use a simple example of business collaboration to demonstrate how our semantic model may be used to verify compatibility between business participants in a collaboration.}
}
@inproceedings{workflow,
  author    = {Peter Y. H. Wong and Jeremy Gibbons},
  title     = {A Process-Algebraic Approach to Workflow Specification and Refinement},
  booktitle = {Software Composition},
  year      = {2007},
  url       = {http://web.comlab.ox.ac.uk/oucl/work/peter.wong/pub/sc2007.pdf},
  abstract  = {This paper describes a process algebraic approach to
      specification and refinement of workflow processes. 
      In particular, we model both specification and implementation
      of workflows as CSP processes. CSP's
      behavioural models and their respective refinement relations 
      not only enable us to prove correctness properties of an individual
      workflow process against its behavioural specification, but also
      allows us to design and develop workflow processes compositionally.
      Moreover, coupled with CSP is an industrial strength
      automated model checker FDR, which allows behavioural properties of
      workflow models to be proved automatically. This paper details some
      CSP models of van der Aalst et al.'s control flow workflow patterns,
      and illustrates behavioural specification and refinement of workflow
      systems with a business process scenario.}
}
@inproceedings{crosstrial,
  author    = {Radu Calinescu and Steve Harris and Jeremy Gibbons and Jim Davies},
  title     = {Cross-Trial Query System for Cancer Clinical Trials},
  booktitle = {International Joint Conferences on Computer, Information and
      Systems Sciences and Engineering (CISSE)},
  year      = {2006},
  month     = dec,
  url       = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/crosstrial.pdf},
  abstract  = {Data sharing represents one of the key objectives and major challenges of today's cancer research. CancerGrid, a consortium of clinicians, cancer researchers, computational biologists and software engineers from leading UK institutions, is developing open-standards cancer informatics addressing this challenge. The CancerGrid solution involves the representation of a widely accepted clinical trials model in controlled vocabulary and common data elements (CDEs) as the enabling factor for cancer data sharing. This paper describes a cancer data query system that supports data sharing across CancerGrid-compliant clinical trial boundaries. The formal specification of the query system allows the model-driven development of a flexible, web-based interface that cancer researchers with limited IT experience can use to identify and query common data across multiple clinical trials.}
}
@INPROCEEDINGS{iterator-msfp,
  author = {Jeremy Gibbons and Oliveira, Bruno C. d. S.},
  title = {The Essence of the {I}terator Pattern},
  booktitle = {Mathematically-Structured Functional Programming},
  year = {2006},
  editor = {Conor McBride and Tarmo Uustalu},
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/iterator-msfp.pdf},
  note = {Superseded by \cite{iterator}},
  abstract = {The Iterator pattern gives a clean interface for
      element-by-element access to a collection.  Imperative iterations
      using the pattern have two simultaneous aspects: mapping and
      accumulating.  Various functional iterations model one or other of
      these, but not both simultaneously.  We argue that McBride and
      Paterson's idioms, and in particular the corresponding traverse
      operator, do exactly this, and therefore capture the essence of the
      Iterator pattern.  We present some axioms for traversal, and
      illustrate with a simple example, the repmin problem.}
}
@INPROCEEDINGS{hodgp,
  author = {Jeremy Gibbons},
  title = {Design Patterns as Higher-Order Datatype-Generic Programs},
  booktitle = {Workshop on Generic Programming},
  year = {2006},
  editor = {Ralf Hinze},
  month = SEP,
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/hodgp.pdf},
  abstract = {Design patterns are reusable abstractions in object-oriented
      software. However, using current programming languages, these
      elements can only be expressed extra-linguistically: as prose,
      pictures, and prototypes. We believe that this is not inherent in the
      patterns themselves, but evidence of a lack of expressivity in the
      languages of today. We expect that, in the languages of the future,
      the code part of design patterns will be expressible as reusable
      library components.  Indeed, we claim that the languages of tomorrow
      will suffice; the future is not far away. The necessary features are
      \emph{higher-order} and \emph{datatype-generic} constructs; these
      features are already or nearly available now. We argue the case by
      presenting higher-order datatype-generic programs capturing Origami,
      a small suite of patterns for recursive data structures.}
}
@INPROCEEDINGS{fission,
  author = {Jeremy Gibbons},
  title = {Fission for Program Comprehension},
  booktitle = {Mathematics of Program Construction},
  volume = {4014},
  series = {Lecture Notes in Computer Science},
  year = {2006},
  editor = {Tarmo Uustalu},
  pages = {162--179},
  publisher = {Springer-Verlag},
  url = {http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/fission.pdf},
  abstract = {Fusion is a program transformation that combines adjacent
      computations, flattening structure and improving efficiency at the
      cost of clarity. Fission is the same transformation, in reverse:
      creating structure, ex nihilo. We explore the use of fission for
      program comprehension, that is, for reconstructing the design of a
      program from its implementation. We illustrate through rational
      reconstructions of the designs for three different C programs that
      count the words in a text file.}
}
@ARTICLE{metamorphisms-scp,
  author = {Jeremy Gibbons},
  title = {Metamorphisms: Streaming Representation-Changers},
  journal = SCP,
  year = {2007},
  volume = 65,
  pages = {108--139},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/metamorphisms-scp.pdf},
  note = {Revised version of \cite{metamorphisms-mpc}},
  abstract = {\emph{Unfolds} generate data structures, and \emph{folds}
      consume them. A \emph{hylomorphism} is a fold after an unfold,
      generating then consuming a \emph{virtual data structure}. A
      \emph{metamorphism} is the opposite composition, an unfold after a
      fold; typically, it will convert from one data representation to
      another. In general, metamorphisms are less interesting than
      hylomorphisms: there is no automatic \emph{fusion} to \emph{deforest}
      the intermediate virtual data structure. However, under certain
      conditions fusion is possible: some of the work of the unfold can be
      done before all of the work of the fold is complete. This permits
      \emph{streaming metamorphisms}, and among other things allows
      conversion of \emph{infinite data representations}. We present the
      theory of metamorphisms and outline some examples.}
}
@INPROCEEDINGS{fast+loose,
  author = {Nils Anders Danielsson and Jeremy Gibbons and John Hughes and Patrik Jansson},
  title = {Fast and Loose Reasoning is Morally Correct},
  booktitle = {Principles of Programming Languages},
  year = {2006},
  month = JAN,
  pages = {206--217},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/fast+loose.pdf},
  note = {Accompanying proofs at \url{http://www.cs.chalmers.se/~nad/repos/fast-and-loose-proofs/}},
  abstract = {We justify reasoning about non-total (partial) functional
      languages using methods seemingly only valid for total ones; this
      permits `fast and loose' reasoning without actually being loose. 
      \par
      Two languages are defined, one total and one partial, with identical
      syntax. The semantics of the partial language includes partial and
      infinite values and lifted types, including lifted function spaces. A
      partial equivalence relation is then defined, the domain of which is
      the total subset of the partial language. It is proved that if two
      closed terms have the same semantics in the total language, then they
      have related semantics in the partial language. It is also shown that
      the partial equivalence relation gives rise to a bicartesian closed
      category, which can be used to reason about values in the domain of
      the relation.}
}
@ARTICLE{spigot,
  author = {Jeremy Gibbons},
  title = {An Unbounded Spigot Algorithm for the Digits of {$\pi$}},
  journal = {American Mathematical Monthly},
  year = {2006},
  volume = {113},
  number = {4},
  pages = {318--328},
  note = {One of the algorithms described in this paper won a prize for \textit{most useful submission} at the \textit{Succ Zeroth International Obfuscated Haskell Code Contest}!},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/spigot.pdf},
  abstract = {Rabinowitz and Wagon (\textit{American Mathematical Monthly}
      102(3):195--203, 1995) present a \emph{spigot algorithm} for
      computing the digits of $\pi$. A spigot algorithm yields its outputs
      incrementally, and does not reuse them after producing them. Their
      algorithm is inherently \emph{bounded}; it requires a commitment in
      advance to the number of digits to be computed, and in fact might
      still produce an incorrect last few digits. We propose two
      \emph{streaming algorithms} based on the same characterization of
      $\pi$, with the same incremental characteristics but without
      requiring the prior bound.}
}
@article{rationals,
  author   = {Jeremy Gibbons and David Lester and Richard Bird},
  title    = {Enumerating the Rationals},
  journal  = {Journal of Functional Programming},
  volume   = {16},
  number   = {4},
  year     = {2006},
  url      = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/rationals.pdf},
  abstract = {We present a series of programs for enumerating the rational numbers without duplication, drawing on some elegant results of Neil Calkin, Herbert Wilf and Moshe Newman.}
}
@INPROCEEDINGS{typecase,
  author = {Oliveira, Bruno C{\'e}sar dos Santos and Jeremy Gibbons},
  title = {Type{C}ase: A Design Pattern for Type-Indexed Functions},
  booktitle = {Haskell Workshop},
  year = {2005},
  editor = {Daan Leijen},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/typecase.pdf},
  abstract = {A \emph{type-indexed function} is a function that is defined
     for each member of some family of types. Haskell's type class
     mechanism provides collections of \emph{open type-indexed functions},
     in which the indexing family can be extended by defining a new type
     class instance but the collection of functions is fixed. The purpose
     of this paper is to present \emph{TypeCase}: a design pattern that
     allows the definition of \emph{closed type-indexed functions}, in
     which the index family is fixed but the collection of functions is
     extensible. It is inspired by Cheney and Hinze's work on lightweight
     approaches to generic programming. We generalise their techniques as
     a \emph{design pattern}. Furthermore, we show that \emph{type-indexed
     functions} with \emph{type-indexed types}, and consequently
     \emph{generic functions} with \emph{generic types}, can also be
     encoded in a lightweight manner, thereby overcoming one of the main
     limitations of the lightweight approaches.}
}
@inproceedings{hodgp-oopsla,
  author    = {Jeremy Gibbons},
  title     = {Design Patterns as Higher-Order Datatype-Generic Programs},
  booktitle = {Object-Oriented Programming: Systems, Languages, Applications},
  address   = {San Diego},
  year      = {2005},
  month     = oct,
  note      = {Tutorial. A revision of \cite{hodgp-ecoop}},
  url       = {http://www.oopsla.org/2005/ShowEvent.do?id=121},
  abstract  = {The purpose of this tutorial is to draw together ideas from the Design Patterns community (the Gang of Four: Gamma, Helm, Johnson, Vlissides) and the Functional Programming world (eg Bird, Meertens, Hughes). In particular, the thesis is that whereas design patterns must be expressed extra-linguistically (as prose, diagrams, examples) in object-oriented languages, they may be captured directly as abstractions using higher-order operators in functional programming languages. Therefore, they may be reasoned about, type-checked, applied and reused, just as any other abstractions may be.
\par
We argue this case by developing the idea of higher-order operators, specifically for capturing patterns of computation in programs. We then build on this to show how the intentions behind a number of the Gang of Four patterns---such as Composite, Visitor, Iterator, and Builder---have higher-order operators as their analogues in functional languages. Specifically, the structure of these patterns is determined essentially by the structure of the data involved, and they can be captured as generic programs parametrized by that datatype.
\par
The aim is to give greater insight into and understanding of already-familiar patterns.}
}
@inproceedings{hodgp-ecoop,
  author    = {Jeremy Gibbons},
  title     = {Design Patterns as Higher-Order Datatype-Generic Programs},
  booktitle = {European Conference on Object-Oriented Programming},
  address   = {Glasgow},
  year      = {2005},
  month     = jul,
  note      = {Tutorial. Later version appears as \cite{hodgp-oopsla}},
  url       = {http://2005.ecoop.org/8.html},
  abstract  = {The aim of this tutorial is to draw together ideas from the Design Patterns community (the Gang of Four: Gamma, Helm, Johnson, Vlissides) and the Functional Programming world (eg Bird, Meertens, Hughes). In particular, the thesis is that whereas design patterns must be expressed extra-linguistically (as prose, diagrams, examples) in object-oriented languages, they may be captured directly as abstractions using higher-order operators in functional programming languages. Therefore, they may be reasoned about, type-checked, applied and reused, just as any other abstractions may be. We argue this case by developing the idea of higher-order operators, specifically for capturing patterns of computation in programs. We then bring this around to show how the intentions behind a number of the Gang of Four patterns---such as Composite, Visitor, Iterator, and Builder---have higher-order operators as their analogues in functional languages.}
}
@INPROCEEDINGS{metamorphisms-mpc,
  author = {Jeremy Gibbons},
  title = {Streaming Representation-Changers},
  booktitle = {Mathematics of Program Construction},
  year = {2004},
  month = JUL,
  volume = {3125},
  pages = {142--168},
  series = {Lecture Notes in Computer Science},
  editor = {Dexter Kozen},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/metamorphisms-mpc.pdf},
  note = {Superseded by \cite{metamorphisms-scp}},
  abstract = {\emph{Unfolds} generate data structures, and \emph{folds}
      consume them. A \emph{hylomorphism} is a fold after an unfold,
      generating then consuming a \emph{virtual data structure}. A
      \emph{metamorphism} is the opposite composition, an unfold after a
      fold; typically, it will convert from one data representation to
      another. In general, metamorphisms are less interesting than
      hylomorphisms: there is no automatic \emph{fusion} to \emph{deforest}
      the intermediate virtual data structure. However, under certain
      conditions fusion is possible: some of the work of the unfold can be
      done before all of the work of the fold is complete. This permits
      \emph{streaming metamorphisms}, and among other things allows
      conversion of \emph{infinite data representations}. We present the
      theory of metamorphisms and outline some examples.}
}
@INPROCEEDINGS{patterns,
  author = {Jeremy Gibbons},
  title = {Patterns in Datatype-Generic Programming},
  booktitle = {Multiparadigm Programming},
  year = {2003},
  pages = {277--289},
  editor = {J{\"o}rg Striegnitz and Kei Davis},
  publisher = {John von {N}eumann Institute for Computing (NIC)},
  volume = {27},
  isbn = {3-00-016005-1},
  note = {First International Workshop on Declarative Programming in the
      Context of Object-Oriented Languages (DPCOOL)},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/patterns.pdf},
  abstract = {\emph{Generic programming} consists of increasing the expressiveness of
    programs by allowing a wider variety of kinds of parameter than is usual.
    The most popular instance of this scheme is the C++ Standard Template
    Library.  \emph{Datatype-generic programming} is another 
    instance, in which the parameters take the form of
    datatypes.  We argue that datatype-generic programming is sufficient to
    express essentially all the genericity found in the Standard Template
    Library.  Moreover, datatype-generic programming is a precisely-defined
    notion with a rigorous mathematical foundation, in contrast to generic
    programming in general and the C++ template mechanism in particular, and
    thereby offers the prospect of better static checking and a greater ability
    to reason about generic programs.  This paper describes work in progress.}
}
@INPROCEEDINGS{arith,
  author = {Richard Bird and Jeremy Gibbons},
  title = {Arithmetic Coding with Folds and Unfolds},
  booktitle = {Advanced Functional Programming 4},
  pages = {1--26},
  volume = {2638},
  series = {Lecture Notes in Computer Science},
  year = {2003},
  editor = {Johan Jeuring and Simon Peyton Jones},
  publisher = {Springer-Verlag},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/arith.pdf},
  note = {Code available at \url{http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/arith.zip}},
  abstract = {Arithmetic coding is a method for data compression. It produces a theoretically optimal compression under much weaker assumptions than Huffman and Shannon-Fano, and can compress within one bit of the limit imposed by Shannon's Noiseless Coding Theorem. Earlier presentations provided little in the way of proof of why the various steps in the encoding process were correct, particularly when it came to the specification of precisely what problem the implementation solved, and the details of why the inverse operation of decoding was correct. Our aim in these lectures is to provide a formal derivation of basic algorithms for coding and decoding. Our development makes heavy use of the algebraic laws of folds and unfolds. One novel result concerns a new pattern of computation, which we call \emph{streaming}, whereby elements of an output list are produced as soon as they are determined (and which has nothing to do with lazy evaluation).}
}
@BOOK{ssgp,
  editor = {Roland Backhouse and Jeremy Gibbons},
  title = {Summer School on Generic Programming},
  booktitle = {Summer School on Generic Programming},
  series = {Lecture Notes in Computer Science},
  volume = {2793},
  publisher = {Springer-Verlag},
  year = {2003},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/ssgp-toc.pdf},
  image = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/ssgp.jpg},
  abstract = {Generic programming is about making programming more effective by making it more general. This volume is about a novel form of genericity in programs, based on parameterizing programs by the structure of the data they manipulate. The material is based on lectures presented at a summer school on Generic Programming held at the University of Oxford in August 2002.}
}
@INCOLLECTION{origami,
  author = {Jeremy Gibbons},
  title = {Origami Programming},
  crossref = {fop},
  pages = {41--60},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/origami.pdf},
  abstract = {One style of functional programming is based purely on recursive equations.
Such equations are easy to explain, and adequate for any computational purpose, but
hard to use well as programs get bigger and more complicated. In a sense,
recursive equations are the `assembly language' of functional programming,
and direct recursion the \emph{goto}.
As computer scientists discovered in the 1960s with
structured programming, it is better to identify common patterns of use of
such too-powerful tools, and capture these patterns as new constructions
and abstractions. In functional programming, in contrast to imperative
programming, we can often express the new constructions as higher-order
operations within the language, whereas the move from unstructured to
structured programming entailed the development of new languages.
\par
In this chapter we will look at folds and
unfolds as abstractions. 
In a precise technical sense, folds and unfolds are the natural patterns of
computation over recursive datatypes; unfolds generate data structures
and folds consume them. Functional programmers are very familiar with
the \emph{foldr} function on lists, and its directional dual \emph{foldl}; they
are gradually coming to terms with the generalisation to folds on other
datatypes. The computational duals, unfolds,
are still rather unfamiliar; we
hope to show here that they are no more complicated than, and just as
useful as, folds, and to promote a style of programming based on these and
similar recursion patterns.}
}
@BOOK{fop,
  editor = {Jeremy Gibbons and Oege de Moor},
  title = {The Fun of Programming},
  booktitle = {The Fun of Programming},
  series = {Cornerstones in Computing},
  publisher = {Palgrave},
  year = {2003},
  isbn = {1-4039-0772-2},
  note = {ISBN 0333992857 in paperback (\url{http://www.palgrave.com/catalogue/catalogue.asp?Title_Id=0333992857}, \url{http://www.amazon.co.uk/exec/obidos/ASIN/0333992857/}), 1403907722 in hardback (\url{http://www.palgrave.com/catalogue/catalogue.asp?Title_Id=1403907722}, \url{http://www.amazon.co.uk/exec/obidos/ASIN/1403907722/})},
  image = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/fop.jpg},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/fop-preface.pdf},
  abstract = {Functional programming has come of age: it is now a standard course in any computer science curriculum. Ideas that were first developed in the laboratory environment of functional programming have proved their values in wider settings, such as generic Java and XML. The time is ripe, therefore, to teach a second course on functional programming, delving deeper into the subject. This book is the text for such a course. The emphasis is on the fun of programming in a modern, well designed programming language such as Haskell. There are chapters that focus on applications, in particular pretty printing, musical composition, hardware description, and graphical design. These applications are interspersed with chapters on techniques, such as the design of efficient data structures, interpreters for other languages, program testing and optimisation. These topics are of interest to every aspiring programmer, not just to those who choose to work in a functional language. Haskell just happens to be a very convenient vehicle for expressing the ideas, and the theme of functional programming as a lingua franca to communicate ideas runs throughout the book. }
}
@INPROCEEDINGS{softeng-projects,
  author = {Andrew Simpson and Andrew Martin and Jeremy Gibbons and Jim
      Davies and Steve McKeever},
  title = {On the Supervision and Assessment of Part-Time Postgraduate
      Software Engineering Projects},
  booktitle = {International Conference on Software Engineering},
  year = {2003},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/softeng-projects.pdf},
  abstract = {This paper describes existing practices in the supervision and assessment of projects undertaken by part-time, postgraduate students in Software Engineering. It considers this aspect of the learning experience, and the educational issues raised, in the context of existing literature---much of which is focussed upon the experience of full-time, undergraduate students. The importance of these issues will increase with the popularity of part-time study at a postgraduate level; the paper presents a set of guidelines for project supervision and assessment.}
}
@ARTICLE{efolds,
  author = {Clare Martin and Jeremy Gibbons and Ian Bayley},
  title = {Disciplined, efficient, generalised folds for nested datatypes},
  journal = {Formal Aspects of Computing},
  year = {2004},
  volume = {16},
  number = {1},
  pages = {19--35},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/efolds.pdf},
  abstract = {Nested (or non-uniform, or non-regular) datatypes have
     recursive definitions in which the type parameter changes. Their
     folds are restricted in power due to type constraints. Bird and
     Paterson introduced \emph{generalised folds} for extra power, but at
     the cost of a loss of efficiency: folds may take more than linear
     time to evaluate. Hinze introduced \emph{efficient generalised folds}
     to counter this inefficiency, but did so in a pragmatic way, at the
     cost of a loss of reasoning power: without categorical or equivalent
     underpinnings, there are no universal properties for manipulating
     folds. We combine the efficiency of Hinze's construction with the
     powerful reasoning tools of Bird and Paterson's.}
}
@PROCEEDINGS{wcgp,
  title = {Generic Programming},
  year = {2003},
  editor = {Jeremy Gibbons and Johan Jeuring},
  publisher = {Kluwer Academic Publishers},
  note = {Proceedings of the IFIP TC2 Working Conference on Generic
      Programming, Schlo\ss{} Dagstuhl, July 2002},
  image = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/wcgp.jpg},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/wcgp-preface.pdf},
  abstract = {Generic programming techniques have always been of interest, both to practitioners and to theoreticians, but only recently have generic programming techniques become a specific focus of research in the functional and object-oriented programming language communities. The IFIP TC2 Working Conference on Generic Programming, held at Schlo\ss{} Dagstuhl, Germany, on 11th and 12th July 2002, brought together leading researchers in generic programming from around the world, and featured papers capturing the state of the art in this important emerging area. The conference was sponsored by IFIP Technical Committee 2, and organized in cooperation with Working Group 2.1 on Algorithmic Languages and Calculi. This book contains revised versions of the papers that were presented at the conference.}
}
@INCOLLECTION{acmmpc-calcfp,
  author = {Jeremy Gibbons},
  title = {Calculating Functional Programs},
  pages = {148--203},
  crossref = {acmmpc},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/acmmpc-calcfp.pdf},
  abstract = {Functional programs are merely equations; they may be
      manipulated by straightforward equational reasoning. In particular,
      one can use this style of reasoning to \emph{calculate} programs, in
      the same way that one calculates numeric values in arithmetic. Many
      useful theorems for such reasoning derive from an \emph{algebraic}
      view of programs, built around datatypes and their
      operations. Traditional algebraic methods concentrate on initial
      algebras, constructors, and values; dual \emph{co-algebraic} methods
      concentrate on final co-algebras, destructors, and processes. Both
      methods are elegant and powerful; they deserve to be combined. }
}
@INCOLLECTION{acmmpc-optimization,
  author = {Richard Bird and Jeremy Gibbons and Shin-Cheng Mu},
  title = {Algebraic Methods for Optimization Problems},
  crossref = {acmmpc},
  pages = {281--307},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/acmmpc-optimization.pdf},
  abstract = {We argue for the benefits of relations over functions for
      modelling programs, and even more so for modelling specifications. To
      support this argument, we present an extended case study for a class
      of optimization problems, deriving efficient functional programs from
      concise relational specifications.}
}
@BOOK{acmmpc,
  editor = {Roland Backhouse and Roy Crole and Jeremy Gibbons},
  title = {Algebraic and Coalgebraic Methods in the Mathematics of Program
     Construction},
  booktitle = {Algebraic and Coalgebraic Methods in the Mathematics of Program
     Construction},
  series = {Lecture Notes in Computer Science},
  volume = {2297},
  publisher = {Springer-Verlag},
  year = {2002},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/acmmpc-toc.pdf},
  image = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/acmmpc.jpg},
  abstract = {Program construction is about turning specifications of computer software into implementations. Doing so in a way that guarantees correctness is an undertaking requiring deep understanding of the languages and tools being used, as well as of the application domain. Recent research aimed at improving the process of program construction exploits insights from abstract algebraic tools such as lattice theory, fixpoint calculus, universal algebra, category theory and allegory theory. This book provides an introduction to these mathematical theories and how they are applied to practical problems.}
}
@INPROCEEDINGS{superposition,
  author = {Jeremy Gibbons},
  title = {Towards a Colimit-Based Semantics for Visual Programming},
  booktitle = {Coordination Models and Languages},
  series = {Lecture Notes in Computer Science},
  volume = {2315},
  pages = {166--173},
  year = {2002},
  month = APR,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/superposition-extended.pdf},
  abstract = {Software architects such as Garlan and Katz promote the
      separation of \emph{computation} from \emph{coordination}. They
      encourage the study of \emph{connectors} as first-class entities, and
      \emph{superposition} of connectors onto components as a paradigm for
      component-oriented programming.  We demonstrate that this is a good
      model for what \emph{visual programming tools} like IBM's VisualAge
      actually do. Moreover, Fiadeiro and Maibaum's categorical semantics
      of parallel programs is applicable to this model, so we can make
      progress towards a formal semantics of visual programming.}
}
@ARTICLE{kernels,
  author = {Jeremy Gibbons and Graham Hutton and Thorsten Altenkirch},
  title = {When is a Function a Fold or an Unfold?},
  note = {Proceedings of Coalgebraic Methods in Computer Science},
  journal = {Electronic Notes in Theoretical Computer Science},
  volume = 44,
  number = 1,
  year = {2001},
  month = APR,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/kernels.ps.gz},
  abstract = {We give a necessary and sufficient condition for when a
      set-theoretic function can be written using the recursion operator
      fold, and a dual condition for the recursion operator unfold.  The
      conditions are simple, practically useful, and generic in the
      underlying datatype.}
}
@ARTICLE{corecursive,
  author = {Jeremy Gibbons and Graham Hutton},
  title = {Proof Methods for Corecursive Programs},
  journal = {Fundamenta Informaticae},
  year = {2005},
  volume = {66},
  number = {4},
  pages = {353--366},
  month = APR # {/} # MAY,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/corecursive.pdf},
  abstract = {Recursion is a well-known and powerful programming
     technique, with a wide variety of applications.  The dual technique
     of corecursion is less well-known, but is increasingly proving to be
     just as useful.  This article is a tutorial on the four main methods
     for proving properties of corecursive programs: fixpoint induction,
     the approximation (or take) lemma, coinduction, and fusion.}
}
@ARTICLE{semantics,
  author = {Clare Martin and Jeremy Gibbons},
  title = {On the Semantics of Nested Datatypes},
  journal = IPL,
  volume = {80},
  number = {5},
  pages = {233--238},
  year = {2001},
  month = DEC,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/semantics.ps.gz},
  abstract = {Nested (or non-regular or non-uniform) datatypes are
      recursively defined parameterised datatypes in which the parameter of
      the datatype changes in the recursive call. The standard semantic
      definition of recursively defined datatypes is as initial algebras in
      the category $\mathit{Set}$ of sets and total functions. Bird and
      Meertens have shown that this theory is inadequate to describe nested
      datatypes. Instead, one solution proposed there was to define them as
      initial algebras in the functor category
      $\mathit{Nat}(\mathit{Set})$, with objects all endofunctors on
      $\mathit{Set}$ and arrows all natural transformations between
      them. We show here that initial algebras are not guaranteed to exist
      in the 
      functor category itself, but that they do exist in one of its
      subcategories: the category of all \emph{cocontinuous} endofunctors
      and natural transformations. This category is then a suitable
      semantic domain for nested datatypes, both first order and
      higher-order.}
}
@INPROCEEDINGS{pointwise,
  author = {Oege de Moor and Jeremy Gibbons},
  title = {Pointwise Relational Programming},
  booktitle = {Algebraic Methodology and Software Technology},
  series = {Lecture Notes in Computer Science},
  volume = {1816},
  pages = {371--390},
  year = {2000},
  month = MAY,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/pointwise.ps.gz},
  abstract = {The point-free relational calculus has been very successful as
      a language for discussing general programming principles. However,
      when it comes to specific applications, the calculus can be rather
      awkward to use: some things are more clearly and simply expressed
      using variables. The combination of variables and relational
      combinators such as converse and choice yields a kind of
      nondeterministic functional programming language. We give a semantics
      for such a language, and illustrate with an example application.}
}
@ARTICLE{approx,
  author = {Graham Hutton and Jeremy Gibbons},
  title = {The Generic Approximation Lemma},
  journal = {Information Processing Letters},
  year = 2001,
  month = AUG,
  volume = {79},
  number = {4},
  pages = {197--201},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/approx.ps.gz},
  abstract = {The approximation lemma was recently introduced as a
     simplification of the well-known take lemma, and is used
     to prove properties of programs that produce lists of values.  We show
     how the approximation lemma, unlike the take lemma, can naturally be
     generalised from lists to a large class of datatypes, and present a
     generic approximation lemma that is parametric in the datatype
     to which it applies.  As a useful by-product, we find that generalising
     the approximation lemma in this way also simplifies its proof.}
}
@ARTICLE{bridging,
  author = {Oege de Moor and Jeremy Gibbons},
  title = {Bridging the Algorithm Gap: A Linear-Time Functional Program for
      Paragraph Formatting},
  journal = SCP,
  volume = {35},
  number = {1},
  year = {1999},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/bridging.ps.gz},
  abstract = {In the constructive programming community it is commonplace to
      see formal developments of textbook algorithms. In the algorithm
      design community, on the other hand, it may be well known that the
      textbook solution to a problem is not the most efficient
      possible. However, in presenting the more efficient solution, the
      algorithm designer will usually omit some of the implementation
      details, thus creating an algorithm gap between the abstract
      algorithm and its concrete implementation. This is in contrast to the
      formal development, which usually proceeds all the way to the
      complete concrete implementation of the less efficient solution. 
      \par
      We claim that the algorithm designer is forced to omit some of the
      details by the relative expressive poverty of the Pascal-like
      languages typically used to present the solution. The greater
      expressiveness provided by a functional language would allow the
      whole story to be told in a reasonable amount of space. In this paper
      we use a functional language to present the development of a
      sophisticated algorithm all the way to the final code. We hope to
      bridge the algorithm gap between abstract and concrete
      implementations, and thereby facilitate communication between the
      constructive programming and algorithm design communities.}
}
@INPROCEEDINGS{naturally,
  author = {Richard Bird and Jeremy Gibbons and Geraint Jones},
  title = {Program Optimisation, Naturally},
  booktitle = {Millennial Perspectives in Computer Science},
  publisher = {Palgrave},
  year = {2000},
  editor = {J. W. Davies and A. W. Roscoe and J. C. P. Woodcock},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/naturally.ps.gz},
  abstract = {It is well-known that each polymorphic function satisfies a
      certain equational law, called a \emph{naturality} condition. Such
      laws are part and parcel of the basic toolkit for improving the
      efficiency of functional programs. More rarely, some polymorphic
      functions also possess a \emph{higher-order} naturality property. One
      example is the operation \emph{unzip} that takes lists of pairs to
      pairs of lists. Surprisingly, this property can be invoked to improve
      the asymptotic efficiency of some divide-and-conquer algorithms from
      $O(n \log n)$ to $O(n)$. The improved algorithms make use of
      polymorphic recursion, and can be expressed neatly using nested
      datatypes, so they also serve as evidence of the practical utility of
      these two concepts.}
}
@ARTICLE{radix,
  author = {Jeremy Gibbons},
  title = {A Pointless Derivation of Radixsort},
  journal = {Journal of Functional Programming},
  volume = {9},
  number = {3},
  pages = {339--346},
  year = {1999},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/radix.ps.gz},
  abstract = {This paper is about point-free (or `pointless') calculations~---
     calculations performed at the level of function composition instead of
     that of function application. We address this topic with the help of an
     example, namely calculating the \emph{radix-sort} algorithm from a more
     obvious specification of sorting. The message that we hope to send
     is that point-free calculations are sometimes surprisingly simpler than
     the corresponding point-wise calculations.}
}
@ARTICLE{genda,
  author = {Jeremy Gibbons},
  title = {Generic Downwards Accumulations},
  journal = SCP,
  volume = 37,
  pages = {37--65},
  year = {2000},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/genda.ps.gz},
  abstract = {A \emph{downwards accumulation} is a higher-order operation that
      distributes information downwards through a data structure, from the
      root towards the leaves.  The concept was originally introduced in an
      ad hoc way for just a couple of kinds of tree.  We generalize the
      concept to an arbitrary regular datatype; the resulting definition is
      co-inductive.}
}
@INPROCEEDINGS{unfold,
  author = {Jeremy Gibbons and Geraint Jones},
  title = {The Under-Appreciated Unfold},
  booktitle = {Proceedings of the Third ACM SIGPLAN International Conference on Functional Programming},
  pages = {273--279},
  address = {Baltimore, Maryland},
  year = {1998},
  month = SEP,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/unfold.ps.gz},
  abstract = {\emph{Folds} are appreciated by functional programmers. Their 
     dual, \emph{unfolds}, are not new, but they are not nearly as well
     appreciated. We believe they deserve better.  To illustrate, we present
     (indeed, we calculate) a number of algorithms for computing the
     \emph{breadth-first traversal} of a tree.  We specify breadth-first
     traversal in terms of \emph{level-order traversal}, which we characterize
     first as a fold.  The presentation as a fold is simple, but it is
     inefficient, and removing the inefficiency makes it no longer a fold.  We
     calculate a characterization as an unfold from the characterization as a
     fold; this unfold is equally clear, but more efficient.  We also calculate
     a characterization of breadth-first traversal directly as an unfold; this
     turns out to be the `standard' queue-based algorithm.}
}
@INPROCEEDINGS{polyda,
  author = {Jeremy Gibbons},
  title = {Polytypic Downwards Accumulations},
  booktitle = {Proceedings of Mathematics of Program Construction},
  series = {Lecture Notes in Computer Science},
  volume = {1422},
  editor = {Johan Jeuring},
  publisher = {Springer-Verlag},
  address = {Marstrand, Sweden},
  year = {1998},
  month = JUN,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/polyda.ps.gz},
  abstract = {A \emph{downwards accumulation} is a higher-order operation that
     distributes information downwards through a data structure, from the root
     towards the leaves.  The concept was originally introduced in an ad~hoc 
     way for just a couple of kinds of tree.  We generalize the concept to an
     arbitrary polynomial datatype; our generalization proceeds via the notion
     of a \emph{path} in such a datatype.}
}
@ARTICLE{spjava,
  author = {Jeremy Gibbons},
  title = {Structured Programming in {J}ava},
  journal = {SIGPLAN Notices},
  volume = 33,
  number = 4,
  pages = {40--43},
  year = {1998},
  month = APR,
  note = {Also in Proceedings of Second Conference on Java in the Computing
      Curriculum, South Bank University, London, ed Fintan Culwin},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/spjava.ps.gz},
  abstract = {We argue that for computing majors, it is better to use a `why'
     approach to teaching programming than a `how' approach; this involves
     (among other things) teaching structured programming before
     progressing to higher-level styles such as object-oriented
     programming.  We also argue that, once it has been decided to teach
     structured programming, Java is a reasonable language to choose for
     doing so.}
}
@INPROCEEDINGS{calculating,
  author = {Jeremy Gibbons},
  title = {Calculating Functional Programs},
  booktitle = {Proceedings of ISRG/SERG Research Colloquium},
  editor = {Keiichi Nakata},
  organization = {School of Computing and Mathematical Sciences, Oxford
      Brookes University},
  note = {Technical Report CMS-TR-98-01},
  year = {1997},
  month = NOV,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/calculating.ps.gz},
  abstract = {A good way of developing a correct program is to \emph{calculate}
      it from its specification. Functional programming languages are
      especially suitable for this, because their referential transparency
      greatly helps calculation. We discuss the ideas behind program
      calculation, and illustrate with an example (the \emph{maximum
      segment sum} problem). We show that calculations are driven by
      \emph{promotion}, and that promotion properties arise from
      \emph{universal properties} of the data types involved.}
}
@TECHREPORT{merging,
  author = {Jeremy Gibbons},
  title = {More on Merging and Selection},
  number = {CMS-TR-97-08},
  institution = {School of Computing and Mathematical Sciences, Oxford Brookes University},
  year = {1997},
  month = OCT,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/merging.ps.gz},
  abstract = {In his paper \textit{On Merging and Selection}
      (\textit{Journal of Functional Programming} 7(3),
      1997), Bird considers the problem of computing the
      $n$th element of the list resulting from merging the
      two sorted lists $x$ and $y$. Representing $x$ and
      $y$ by trees, Bird derives an algorithm for the
      problem taking time proportional to the sum of their
      depths. Bird's derivation is more complicated than
      necessary. By the simple tactic of delaying a design
      decision (in this case, the decision to represent
      the lists as trees) as long as possible, we obtain a
      much simpler solution.}
}
@TECHREPORT{conditionals,
  author = {Jeremy Gibbons},
  title = {Conditionals in Distributive Categories},
  number = {CMS-TR-97-01},
  institution = {School of Computing and Mathematical Sciences, Oxford Brookes University},
  year = {1997},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/conditionals.ps.gz},
  abstract = {In a distributive category (a category in which the product
     distributes over the coproduct), coproducts can be used to model
     conditional expressions. We develop such a theory of conditionals.}
}
@ARTICLE{thirdht,
  author = {Jeremy Gibbons},
  title = {The {T}hird {H}omomorphism {T}heorem},
  journal = {Journal of Functional Programming},
  year = {1996},
  volume = {6},
  number = {4},
  pages = {657--665},
  note = {Earlier version appeared in C.\,B.\,Jay, editor, {\it Computing:
      The Australian Theory Seminar}, Sydney, December~1994, p.\,62--69},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/thirdht.ps.gz},
  abstract = {The {\em Third Homomorphism Theorem\/} is a folk theorem of
      the constructive algorithmics community. It states that a function on
      lists that can be computed both from left to right and from right to
      left is necessarily a {\em list homomorphism\/}---it can be computed
      according to {\em any\/} parenthesization of the list.  
      \par 
      We formalize and prove the theorem, and describe two practical
      applications: to fast parallel algorithms for language recognition
      problems and for downwards accumulations on trees.}
}
@PROCEEDINGS{dmtcs,
  title = {Combinatorics, Complexity and Logic: Proceedings of Discrete
      Mathematics and Theoretical Computer Science},
  year = {1996},
  editor = {Douglas Bridges and Cris Calude and Jeremy Gibbons and Steve
      Reeves and Ian Witten},
  publisher = {Springer-Verlag},
  isbn = {981-3083-14-X},
  address = {Singapore},
  image = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/dmtcs.jpg},
  abstract = {DMTCS'96 is the first of a planned series of conferences organized by the Centre for Discrete Mathematics and Theoretical Computer Science, and is the first joint venture of the Computer Science and Mathematics departments of the University of Auckland and Waikato, New Zealand. These proceedings contain original papers which had been solicited in all areas of discrete mathematics and theoretical computer science, in particular in the areas of combinatorics, complexity, computability, constructivity, and logic.}
}
@ARTICLE{drawing,
  author = {Jeremy Gibbons},
  title = {Deriving Tidy Drawings of Trees},
  journal = {Journal of Functional Programming},
  year = {1996},
  volume = {6},
  number = {3},
  pages = {535--562},
  note = {Earlier version appears as Technical Report No.\,82, Department of
      Computer Science, University of Auckland},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/drawing.ps.gz},
  abstract = {The {\em tree-drawing problem\/} is to produce a `tidy'
      mapping of elements of a tree to points in the plane. In this
      paper, we derive an efficient algorithm for producing tidy
      drawings of trees. The specification, the starting point for the
      derivations, consists of a collection of intuitively appealing
      {\em criteria\/} satisfied by tidy drawings. The derivation
      shows constructively that these criteria completely determine
      the drawing. Indeed, there is essentially only one reasonable
      drawing algorithm satisfying the criteria; its development is
      almost mechanical.  
      \par 
      The algorithm consists of an {\em upwards accumulation\/}
      followed by a {\em downwards accumulation\/} on the tree, and is
      further evidence of the utility of these two higher-order tree
      operations.}
}
@ARTICLE{quickly,
  author = {Jeremy Gibbons},
  title = {Computing Downwards Accumulations on Trees Quickly},
  journal = {Theoretical Computer Science},
  year = {1996},
  volume = {169},
  number = {1},
  pages = {67--80},
  note = {Earlier version appeared in Proceedings of the 16th Australian
      Computer Science Conference, Brisbane, 1993},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/quickly.ps.gz},
  abstract = {{\em Downwards passes\/} on binary trees are essentially
      functions which pass information down a tree, from the root towards
      the leaves. Under certain conditions, a downwards pass is both
      `efficient' (computable in a functional style in parallel time
      proportional to the depth of the tree) and `manipulable' (enjoying a
      number of distributivity properties useful in program construction);
      we call a downwards pass satisfying these conditions a {\em downwards
      accumulation}. In this paper, we show that these conditions do in
      fact yield a stronger conclusion: the accumulation can be computed in
      parallel time proportional to the {\em logarithm\/} of the depth of
      the tree, on a CREW PRAM machine.}
}
@INPROCEEDINGS{tracing,
  author = {Jeremy Gibbons and Keith Wansbrough},
  title = {Tracing Lazy Functional Languages},
  booktitle = {Computing: The Australasian Theory Seminar},
  year = {1996},
  month = JAN,
  editor = {Michael E. Houle and Peter Eades},
  pages = {11--20},
  address = {Melbourne},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/tracing.ps.gz},
  abstract = {We argue that Ariola and Felleisen's and Maraist, Odersky and
      Wadler's axiomatization of the call-by-need lambda calculus forms a
      suitable formal basis for tracing evaluation in lazy functional
      languages. In particular, it allows a one-dimensional textual
      representation of terms, rather than requiring a two-dimensional
      graphical representation using arrows. We describe a program
      LetTrace, implemented in Gofer and tracing lazy evaluation of a
      subset of Gofer.}
}
@INPROCEEDINGS{dashed,
  author = {Jeremy Gibbons},
  title = {Dotted and Dashed Lines in {M}etafont},
  booktitle = {Proceedings of the 1995 Annual Meeting},
  year = {1995},
  editor = {Robin Fairbairns},
  organization = {\TeX{} Users' Group},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/dashed.ps.gz},
  abstract = {We show how to draw evenly dotted and dashed curved lines in
      Metafont, using {\em recursive refinement\/} of paths. Metapost
      provides extra primitives that can be used for this task, but the
      method presented here can be used for both Metafont and Metapost.}
}
@INPROCEEDINGS{damgs,
  author = {Jeremy Gibbons},
  title = {An Initial-Algebra Approach to Directed Acyclic Graphs},
  booktitle = {Mathematics of Program Construction},
  volume = {947},
  series = {Lecture Notes in Computer Science},
  editor = {Bernhard M{\"o}ller},
  publisher = {Springer-Verlag},
  pages = {282--303},
  year = {1995},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/damgs.ps.gz},
  abstract = {The initial-algebra approach to modelling datatypes consists
      of giving {\em constructors\/} for building larger objects of that
      type from smaller ones, and {\em laws\/} identifying different ways
      of constructing the same object. The recursive decomposition of
      objects of the datatype leads directly to a recursive pattern of
      computation on those objects, which is very helpful for both
      functional and parallel programming.
      \par
      We show how to model a particular kind of directed acyclic graph
      using this initial-algebra approach.}
}
@UNPUBLISHED{nzfpdc-squiggol,
  author = {Jeremy Gibbons},
  title = {An Introduction to the {B}ird-{M}eertens {F}ormalism},
  note = {Presented at `New Zealand Formal Program Development
      Colloquium', Hamilton, November~1994},
  year = {1994},
  month = NOV,
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/nzfpdc-squiggol.ps.gz},
  abstract = {The Bird-Meertens Formalism, or `Squiggol', is a calculus for
      the construction of programs from their specifications by a process
      of equational reasoning. Developments are directed by considerations
      of {\em data}, as opposed to {\em program}, structure.  
      \par
      This paper presents a brief introduction to the philosophy and
      notation of the calculus, in the guise of the (well-known) derivation
      of a linear-time solution to the `maximum segment sum' problem.}
}
@ARTICLE{efficient,
  author = {Jeremy Gibbons and Wentong Cai and David Skillicorn},
  title = {Efficient Parallel Algorithms for Tree Accumulations},
  journal = {Science of Computer Programming},
  year = {1994},
  volume = {23},
  pages = {1--18},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/efficient.ps.gz},
  abstract = {Accumulations are higher-order operations on structured
      objects; they leave the shape of an object unchanged, but replace
      elements of that object with accumulated information about other
      elements. Upwards and downwards accumulations on trees are two such
      operations; they form the basis of many tree algorithms.  We present
      two EREW PRAM algorithms for computing accumulations on trees taking
      $O(\log n)$ time on $O(n/\log n)$ processors, which is optimal.}
}
@INPROCEEDINGS{nzfpdc-drawing,
  author = {Jeremy Gibbons},
  title = {How to Derive Tidy Drawings of Trees},
  booktitle = {Proceedings of Salodays in Auckland},
  year = {1994},
  editor = {C. Calude and M. J. J. Lennon and H. Maurer},
  pages = {53--73},
  address = {Department of Computer Science, University of Auckland},
  seealso = {Condensed version of \cite{Gibbons96:Deriving}},
  note = {Also presented at `New Zealand Formal Program Development
      Colloquium', Hamilton, November~1994},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/nzfpdc-drawing.ps.gz},
  abstract = {The {\em tree-drawing problem\/} is to produce a `tidy'
      mapping of elements of a tree to points in the plane. In this
      paper, we derive an efficient algorithm for producing tidy
      drawings of trees. The specification, the starting point for the
      derivations, consists of a collection of intuitively appealing
      {\em criteria\/} satisfied by tidy drawings. The derivation
      shows constructively that these criteria completely determine
      the drawing. Indeed, there is essentially only one reasonable
      drawing algorithm satisfying the criteria; its development is
      almost mechanical.  
      \par 
      The algorithm consists of an {\em upwards accumulation\/}
      followed by a {\em downwards accumulation\/} on the tree, and is
      further evidence of the utility of these two higher-order tree
      operations.}
}
@TECHREPORT{linear,
  author = {Geraint Jones and Jeremy Gibbons},
  title = {Linear-time Breadth-first Tree Algorithms: An Exercise in the
      Arithmetic of Folds and Zips},
  institution = {Programming Research Group, University of Oxford, and
      Department of Computer Science, University of Auckland},
  year = {1993},
  month = MAY,
  number = {Computer Science Report No.\,71, University of Auckland},
  note = {IFIP Working Group 2.1 working paper 705~WIN-2},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/linear.ps.gz},
  abstract = {This paper is about an application of the mathematics of the
      zip, reduce (fold) and accumulate (scan) operations on lists. It
      gives an account of the derivation of a linear-time breadth-first
      tree traversal algorithm, and of a subtle and efficient
      breadth-first tree labelling algorithm.}
}
@INPROCEEDINGS{formal,
  author = {Jeremy Gibbons},
  title = {Formal Methods: Why Should {I} Care? The Development of the {T800}
      {T}ransputer Floating-Point Unit},
  booktitle = {Proceedings of the 13th New Zealand Computer Society
      Conference},
  year = {1993},
  pages = {207--217},
  editor = {John Hosking},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/formal.ps.gz},
  abstract = {The term `formal methods' is a general term for precise
      mathematically-based techniques used in the development of computer
      systems, both hardware and software. This paper discusses formal
      methods in general, and in particular describes their
      successful role in specifying, constructing and proving correct
      the floating-point unit of the Inmos T800 transputer chip.}
}
@INPROCEEDINGS{accumulations,
  author = {Jeremy Gibbons},
  title = {Upwards and Downwards Accumulations on Trees},
  booktitle = {Mathematics of Program Construction},
  editor = {R. S. Bird and C. C. Morgan and J. C. P. Woodcock},
  year = {1993},
  volume = {669},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer-Verlag},
  note = {This revised version appeared in 
      \textit{Proceedings of the Massey Functional Programming Workshop}
      (ed E. Ireland and N. Perry, 1992)},
  url = {http://www.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/accumulations.ps.gz},
  abstract = {An {\em accumulation\/} is a higher-order operation over
      structured objects of some type; it leaves the shape of an object
      unchanged, but replaces each element of that object with some
      accumulated information about the other elements. Upwards and
      downwards accumulations on trees are two instances of this scheme;
      they replace each element of a tree with some function---in fact,
      some homomorphism---of that element's descendants and of its
      ancestors, respectively. These two operations can be thought of as
      passing information up and down the tree.
      \par
      We introduce these two accumulations, and show how together they
      solve the so-called prefix sums problem.}
}
@PHDTHESIS{ata,
  author = {Jeremy Gibbons},
  title = {Algebras for Tree Algorithms},
  school = {Programming Research Group, Oxford University},
  type = {{D}.\,{Phil}.~thesis},
  year = {1991},
  note = {Available as Technical Monograph PRG-94. ISBN 0-902928-72-4},
  abstract = {This thesis presents an investigation into the properties of
      various algebras of trees. In particular, we study the influence
      that the structure of a tree algebra has on the solution of
      algorithmic problems about trees in that algebra. The investigation
      is conducted within the framework provided by the Bird-Meertens
      formalism, a calculus for the construction of programs by
      equational reasoning from their specifications.
      \par
      We present three different tree algebras: two kinds of binary tree
      and a kind of general tree. One of the binary tree algebras, called
      `hip trees', is new. Instead of being built with a single ternary
      operator, hip trees are built with two binary operators which
      respectively add left and right children to trees which do not
      already have them; these operators enjoy a kind of associativity
      property.
      \par
      Each of these algebras brings with it with a class of
      `structure-respecting' functions called catamorphisms; the
      definition of a catamorphism and a number of its properties come
      for free from the definition of the algebra, because the algebra is
      chosen to be initial in a class of algebras induced by a
      (cocontinuous) functor. Each algebra also brings with it, but not
      for free, classes of `structure-preserving' functions called
      accumulations. An accumulation is a function that preserves the
      shape of a structured object such as a tree, but replaces each
      element of that object with some catamorphism applied to some of
      the other elements.  The two classes of accumulation that we study
      are the `upwards' and `downwards' accumulations, which pass
      information from the leaves of a tree towards the root and from the
      root towards the leaves, respectively.
      \par
      Upwards and downwards accumulations turn out to be the key to the
      solution of many problems about trees. We derive accumulation-based
      algorithms for a number of problems; these include the parallel
      prefix algorithm for the prefix sums problem, algorithms for
      bracket matching and for drawing binary and general trees, and
      evaluators for decorating parse trees according to an attribute
      grammar.}
}
@ARTICLE{balanced,
  author = {Jeremy Gibbons},
  title = {Balanced Binary Trees as a Partial Free Algebra},
  journal = {The Squiggolist},
  year = {1990},
  volume = {1},
  number = {4},
  abstract = {You don't need `subset' types to construct balanced trees, if you allow yourself partial constructors or a countable infinity of constructors.}
}
@ARTICLE{unique,
  author = {Jeremy Gibbons},
  title = {The Unique Derivative of a Prefix-Closed Predicate},
  journal = {The Squiggolist},
  year = {1989},
  volume = {1},
  number = {2},
  abstract = {Talking about `the' derivative of a prefix-closed function makes no sense, but talking about the strongest and the weakest derivative does.}
}
@ARTICLE{interesting,
  author = {Jeremy Gibbons},
  title = {An Interesting Characterisation of Prefix-Closure},
  journal = {The Squiggolist},
  year = {1989},
  volume = {1},
  number = {2},
  abstract = {A predicate is prefix-closed iff it holds of all initial segments of a list precisely when it holds of the whole list.}
}
@ARTICLE{kmp,
  author = {Richard S. Bird and Jeremy Gibbons and Geraint Jones},
  title = {Formal Derivation of a Pattern Matching Algorithm},
  journal = {Science of Computer Programming},
  year = {1989},
  volume = {12},
  number = {2},
  pages = {93--104},
  month = JUL,
  url = {http://dx.doi.org/10.1016/0167-6423(89)90036-1},
  abstract = {This paper is devoted to the synthesis of a functional version of the Knuth-Morris-Pratt algorithm for pattern matching. This algorithm was first discussed by Knuth; since then formal developments have been given by Dijkstra and Dromey, among many others. The novel aspects of the present treatment are: (i) the result is expressed as a (very short) functional program; and (ii) the derivation makes use of the calculus of lists described by Bird.}
}
@MASTERSTHESIS{newview,
  author = {Jeremy Gibbons},
  title = {A New View of Binary Trees},
  school = {Programming Research Group, Oxford University},
  year = {1988},
  type = {Transferral dissertation},
  note = {Abstract appears in the Bulletin of the EATCS, number~39, p.~214.},
  abstract = {We present a new formalism of labelled binary trees, using two
      partial binary constructors instead of the usual total ternary
      constructor. This formalism is complemented by some higher-order
      operators, encapsulating common patterns of computation on trees. We
      explore their use by deriving solutions to a couple of algorithmic
      problems on trees. 
      \par
      The work proceeds in the Bird-Meertens style. This is a calculus for
      programs, closely related to applicative programming languages.
      Expressions are written at the function level, avoiding the use of
      unnecessary names and being as concise as possible. Derivations are
      performed by program transformation~--- the application of
      correctness-preserving transformations to an initial specification,
      with the intention of improving its efficiency without changing its
      meaning.}
}

This file has been generated by bibtex2html 1.85.

Jeremy Gibbons (Jeremy.Gibbons@comlab.ox.ac.uk)
Random Image
Random Image
Random Image