@misc{Seward1996,
abstract = {The bzip2 file compression program was developed by Julian Seward and launched on the 18th of July in 1996. It has remained an open source program, available to all for free, for over twenty two years now. The last stable release was seven years ago. The version 1.0.6 was released on the 20th of September in 2010. bzip2 compression program is based on Burrows–Wheeler algorithm. The program can compress files but cannot archive them. Julian Seward is still in charge of maintaining the program. The compression application works on all major operating systems and is available as a BSD-like license. The program uses .bz2 as its filename extension, application/x-bzip2 as the media type on internet and public.archive.bzip2 as the uniform type identifier.},
author = {Seward, Julian},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Bzip2}},
url = {http://www.bzip.org/},
year = {1996}
}
@article{Gurung2010,
abstract = {The Corner Table (CT) represents a triangle mesh by storing 6 integer references per triangle (3 vertex references in the Vertex table and 3 references to opposite corners in the Opposite table, which accelerate access to adjacent triangles). The Compact Half Face (CHF) representation extends CT to tetrahedral meshes, storing 8 references per tetrahedron (4 in the Vertex table and 4 in the Opposite table). We use the term Vertex Opposite Table (VOT) to refer to both CT and CHF and propose a sorted variation, SVOT, which is inspired by tetrahedral mesh encoding techniques and which works for both triangle and tetrahedral meshes. The SVOT does not require additional storage and yet provides, for each vertex, a reference to an incident corner from which the star (incident cells) of the vertex may be traversed at a constant cost per visited element. We use the corner operators for querying and traversing the triangle meshes while for tetrahedral meshes, we propose a set of powerful wedge-based operators. Improving on the SVOT, we propose our Sorted Opposite Table (SOT) variation, which eliminates the Vertex table completely and hence reduces storage requirements by 50{\%} to only 3 references per triangle for triangle meshes and 4 references and 9 bits per tetrahedron for tetrahedral meshes, while preserving the vertex-to-incident- corner references and supporting the corner operators and our wedge operators with a constant average cost. The SVOT and SOT representation work on manifold meshes with boundaries.},
author = {Gurung, Topraj and Rossignac, Jarek},
doi = {10.1145/1629255.1629266},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gurung, Rossignac - 2010 - SOT Compact representation for triangle and tetrahedral meshes.pdf:pdf},
isbn = {9781605587110},
journal = {Georgia Institute of Technology GT-IC-10-01},
keywords = {data structures,geometry compression,meshing,modeling,polyhedra,storage,tetrahedral meshes,triangle meshes},
mendeley-tags = {data structures},
pages = {1--10},
title = {{SOT: Compact representation for triangle and tetrahedral meshes}},
url = {http://scholar.google.com/scholar?hl=en{\&}btnG=Search{\&}q=intitle:Compact+Representation+for+Triangle+and+Tetrahedral+Meshes{\#}4},
year = {2010}
}
@misc{Preda2008,
author = {Preda, Marius},
keywords = {site},
mendeley-tags = {site},
title = {{Graphics Codec - MPEG-4}},
url = {http://www.mymultimediaworld.com/software/opensource/gc/},
urldate = {2018-10-17},
year = {2008}
}
@article{Isenburg2003,
abstract = {Polygonal models acquired with emerging 3D scanning technology or from large scale CAD applications easily reach sizes of several gigabytes and do not fit in the address space of common 32-bit desktop PCs. In this paper we propose an out-of-core mesh compression technique that converts such gigantic meshes into a streamable, highly compressed representation. During decompression only a small portion of the mesh needs to be kept in memory at any time. As full connectivity information is available along the decompression boundaries, this provides seamless mesh access for incremental in-core processing on gigantic meshes. Decompression speeds are CPU-limited and exceed one million vertices and two million triangles per second on a 1.8 GHz Athlon processor. A novel external memory data structure provides our compression engine with transparent access to arbitrary large meshes. This out-of-core mesh was designed to accommodate the access pattern of our region-growing based compressor, which - in return - performs mesh queries as seldom and as local as possible by remembering previous queries as long as needed and by adapting its traversal slightly. The achieved compression rates are state-of-the-art.},
author = {Isenburg, Martin and Gumhold, Stefan},
doi = {10.1145/882262.882366},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Gumhold - 2003 - Out-of-core compression for gigantic polygon meshes.pdf:pdf},
isbn = {1-58113-709-5},
issn = {07300301},
journal = {ACM Transactions on Graphics},
keywords = {cs,edu,external memory data structures,isenburg,mesh compression,out-of-core algorithms,processing sequences,streaming meshes,unc},
number = {3},
pages = {935},
title = {{Out-of-core compression for gigantic polygon meshes}},
volume = {22},
year = {2003}
}
@misc{Collet2013,
abstract = {New generation entropy codecs : Finite State Entropy and Huff0},
author = {Collet, Yann},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Finite State Entropy library}},
url = {https://github.com/Cyan4973/FiniteStateEntropy},
year = {2013}
}
@article{Grabner2002,
author = {Grabner, Marcus},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Grabner - 2002 - Compressed Adaptive Multiresolution Encoding.pdf:pdf},
title = {{Compressed Adaptive Multiresolution Encoding}},
year = {2002}
}
@misc{Preda2008a,
author = {Preda, Marius},
keywords = {site},
mendeley-tags = {site},
title = {{3D Graphics Compression Model | MPEG}},
url = {https://mpeg.chiariglione.org/standards/mpeg-4/3d-graphics-compression-model},
urldate = {2018-10-20},
year = {2008}
}
@misc{Deering1995,
abstract = {Java 3D API Specs Deering 1995},
author = {Deering, Michael},
keywords = {site},
mendeley-tags = {site},
title = {{3D Geometry Compression}},
url = {https://docs.oracle.com/cd/E17802{\_}01/j2se/javase/technologies/desktop/java3d/forDevelopers/j3dguide/AppendixCompress.doc.html},
urldate = {2018-10-17},
year = {1995}
}
@article{Lee2005,
author = {Lee, Haeyoung and Park, Sujin},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Park - 2005 - Adaptive Vertex Chasing for the Lossless Geometry Coding of 3D Meshes.pdf:pdf},
isbn = {3540300279},
issn = {03029743},
journal = {Advances in Multimedia Information Processing - PCM 2005},
pages = {108--119},
title = {{Adaptive Vertex Chasing for the Lossless Geometry Coding of 3D Meshes}},
volume = {3767},
year = {2005}
}
@article{Maglo2013,
abstract = {3D meshes are commonly used to represent virtual surface and volumes. However, their raw data representations take a large amount of space. Hence, 3D mesh compression has been an active research topic since the mid 90's. In 2005, two very good review articles describing the pioneering works were published. Yet, new technologies have emerged since then. In this article, we summarize the early works and put the focus on these novel approaches. We classify and describe the algorithms, evaluate their performance and provide synthetic comparisons. We also outline the emerging trends for future researches.},
author = {Maglo, Adrien},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Maglo - 2013 - 39 3D mesh compression survey, comparisons and emerging trends.pdf:pdf},
internal-note = {Preprint of the survey published as Maglo2015 in ACM Computing Surveys; placeholder DOI/URL (10.1145/0000000.0000000) removed -- consider citing Maglo2015 instead},
keywords = {3D mesh compression,survey,comparisons,emerging trends,single-rate,progressive,random accessible},
number = {4},
title = {{3D mesh compression: survey, comparisons and emerging trends}},
volume = {9},
year = {2013}
}
@article{Peng2005,
abstract = {Three-dimensional (3D) meshes have been widely used in graphic applications for the representation of 3D objects. They often require a huge amount of data for storage and/or transmission in the raw data format. Since most applications demand compact storage, fast transmission, and efficient processing of 3D meshes, many algorithms have been proposed to compress 3D meshes efficiently since early 1990s. In this survey paper, we examine 3D mesh compression technologies developed over the last decade, with the main focus on triangular mesh compression technologies. In this effort, we classify various algorithms into classes, describe main ideas behind each class, and compare the advantages and shortcomings of the algorithms in each class. Finally, we address some trends in the 3D mesh compression technology development. {\textcopyright} 2005 Elsevier Inc. All rights reserved.},
author = {Peng, Jingliang and Kim, Chang Su and Kuo, C. C. Jay},
doi = {10.1016/j.jvcir.2005.03.001},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Peng, Kim, Kuo - 2005 - Technologies for 3D mesh compression A survey.pdf:pdf},
isbn = {1047-3203},
issn = {10473203},
journal = {Journal of Visual Communication and Image Representation},
keywords = {3D mesh compression,MPEG-4,Progressive mesh coding,Single-rate mesh coding,survey},
mendeley-tags = {survey},
number = {6},
pages = {688--733},
title = {{Technologies for 3D mesh compression: A survey}},
volume = {16},
year = {2005}
}
@article{Maglo2015,
abstract = {3D meshes are commonly used to represent virtual surface and volumes. However, their raw data representations take a large amount of space. Hence, 3D mesh compression has been an active research topic since the mid 1990s. In 2005, two very good review articles describing the pioneering works were published. Yet, new technologies have emerged since then. In this article, we summarize the early works and put the focus on these novel approaches. We classify and describe the algorithms, evaluate their performance, and provide synthetic comparisons. We also outline the emerging trends for future research.},
author = {Maglo, Adrien and Lavou{\'{e}}, Guillaume and Dupont, Florent and Hudelot, C{\'{e}}line},
doi = {10.1145/2693443},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Maglo - 2013 - 3D mesh compression survey , comparisons and emerging trends.pdf:pdf},
issn = {0360-0300},
journal = {ACM Computing Surveys},
keywords = {Algorithms,Computers,Performance evalua,Studies,survey},
mendeley-tags = {survey},
title = {{3D Mesh Compression: Survey, Comparisons, and Emerging Trends}},
year = {2015}
}
@article{Jakob2017,
author = {Jakob, Johannes and Buchenau, Christoph and Guthe, Michael},
doi = {10.1111/cgf.13246},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jakob, Buchenau, Guthe - 2017 - A Parallel Approach to Compression and Decompression of Triangle Meshes using the GPU.pdf:pdf},
number = {5},
title = {{A Parallel Approach to Compression and Decompression of Triangle Meshes using the GPU}},
volume = {36},
year = {2017}
}
@article{COS426Precept4,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Unknown - Unknown - COS 426 Precept 4 Agenda.pdf:pdf},
title = {{COS 426 : Precept 4 Agenda}}
}
@article{Gueziec1999,
abstract = {We present a method for compressing non-manifold polygonal meshes, i.e., polygonal meshes with singularities, which occur very frequently in the real-world. Most efficient polygonal compression methods currently available are restricted to a manifold mesh: they require converting a non-manifold mesh to a manifold mesh, and fail to retrieve the original model connectivity after decompression. The present method works by converting the original model to a manifold model, encoding the manifold model using an existing mesh compression technique, and clustering, or stitching together during the decompression process vertices that were duplicated earlier to faithfully recover the original connectivity. This paper focuses on efficiently encoding and decoding the stitching information. Using a naive method, the stitching information would incur a prohibitive cost, while our methods guarantee a worst case cost of O(log m) bits per vertex replication, where m is the number of non-manifold vertices. Furthermore, when exploiting the adjacency between vertex replications, many replications can be encoded with an insignificant cost. By interleaving the connectivity, stitching information, geometry and properties, we can avoid encoding repeated vertices (and properties bound to vertices) multiple times; thus a reduction of the size of the bit-stream of about 10{\%} is obtained compared with encoding the model as a manifold. {\textcopyright} 1999 Elsevier Science B.V. All rights reserved.},
author = {Gu{\'{e}}ziec, Andr{\'{e}} and Bossen, Frank and Taubin, Gabriel and Silva, Claudio},
doi = {10.1016/S0925-7721(99)00027-9},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gu{\'{e}}ziec et al. - 1999 - Efficient compression of non-manifold polygonal meshes.pdf:pdf},
isbn = {0-7803-5897-X},
issn = {09257721},
journal = {Computational Geometry: Theory and Applications},
keywords = {Geometry compression,Non-manifold,Polygonal mesh,Stitching},
number = {1-3},
pages = {137--166},
title = {{Efficient compression of non-manifold polygonal meshes}},
volume = {14},
year = {1999}
}
@article{Funkhouser2014,
author = {Funkhouser, Tom},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Funkhouser - 2014 - Multiresolution Meshes Multiresolution Meshes.pdf:pdf},
title = {{Multiresolution Meshes Multiresolution Meshes}},
year = {2014}
}
@misc{Turk1996,
author = {Turk, Greg and Levoy, Marc},
keywords = {models,site},
mendeley-tags = {models,site},
title = {{The Stanford 3D Scanning Repository}},
url = {http://graphics.stanford.edu/data/3Dscanrep/},
urldate = {2018-12-13},
year = {1996}
}
@article{Daras2004,
abstract = {MPEG-4's complicated format makes developing scenes from scratch all but impossible for novice users. By converting MPEG-4's text-based description into graphical form, the authors' proposed tool exploits all of MPEG-4's 3D functionalities while easing the authoring burden.},
author = {Daras, Petros and Kompatsiaris, Ioannis and Raptis, Theodoros and Strintzis, Michael G.},
doi = {10.1109/MMUL.2004.1289042},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Daras et al. - 2004 - An MPEG-4 tool for composing 3D scenes.pdf:pdf},
issn = {1070986X},
journal = {IEEE Multimedia},
number = {2},
pages = {58--62+64},
title = {{An MPEG-4 tool for composing 3D scenes}},
volume = {11},
year = {2004}
}
@article{Cignoni2004,
abstract = {We describe an efficient technique for out-of-core construction and accurate view-dependent visualization of very large surface models. The method uses a regular conformal hierarchy of tetrahedra to spatially partition the model. Each tetrahedral cell contains a precomputed simplified version of the original model, represented using cache coherent indexed strips for fast rendering. The representation is constructed during a fine-to-coarse simplification of the surface contained in diamonds (sets of tetrahedral cells sharing their longest edge). The construction preprocess operates out-of-core and parallelizes nicely. Appropriate boundary constraints are introduced in the simplification to ensure that all conforming selective subdivisions of the tetrahedron hierarchy lead to correctly matching surface patches. For each frame at runtime, the hierarchy is traversed coarse-to-fine to select diamonds of the appropriate resolution given the view parameters. The resulting system can interatively render high quality views of out-of-core models of hundreds of millions of triangles at over 40Hz (or 70M triangles/s) on current commodity graphics platforms.},
author = {Cignoni, Paolo and Ganovelli, Fabio and Gobbetti, Enrico and Marton, Fabio and Ponchio, Federico and Scopigno, Roberto},
doi = {10.1145/1015706.1015802},
isbn = {0730-0301},
issn = {0730-0301},
journal = {ACM Trans. Graph.},
keywords = {level of detail,out-of-core algorithms},
title = {{Adaptive TetraPuzzles: Efficient out-of-core construction and visualization of gigantic multiresolution polygonal models}},
year = {2004}
}
@article{Gotsman2002,
abstract = {We survey recent developments in compact representations of 3D mesh data. This includes: Methods to reduce the complexity of meshes by simplification, thereby reducing the number of vertices and faces in the mesh; Methods to resample the geometry in order to optimize the vertex distribution; Methods to compactly represent the connectivity data (the graph structure defined by the edges) of the mesh; Methods to compactly represent the geometry data (the vertex coordinates) of a mesh.},
author = {Gotsman, Craig and Gumhold, Stefan and Kobbelt, Leif P.},
doi = {10.1007/978-3-662-04388-2_12},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gotsman, Gumhold, Kobbelt - 2002 - Simplification and compression of 3D meshes.pdf:pdf},
isbn = {978-3-540-43639-3},
journal = {Tutorials on multiresolution in geometric modeling},
pages = {319--362},
title = {{Simplification and compression of 3D meshes}},
url = {http://www.cs.sfu.ca/{~}haoz/teaching/cmpt464/references/compression{\_}tutorial.ps{\%}5Cnhttp://www.graphics.rwth-aachen.de/media/papers/mingle1.pdf},
year = {2002}
}
@article{Kalberer2005,
abstract = {We introduce FreeLence, a novel and simple single-rate compression coder for triangle manifold meshes. Our method uses free valences and exploits geometric information for connectivity encoding. Furthermore, we introduce a novel linear prediction scheme for geometry compression of 3D meshes. Together, these approaches yield a significant entropy reduction for mesh encoding with an average of 20-30{\%} over leading single-rate region-growing coders, both for connectivity and geometry. [PUBLICATION ABSTRACT]},
author = {K{\"{a}}lberer, Felix and Polthier, Konrad and Reitebuch, Ulrich and Wardetzky, Max},
doi = {10.1111/j.1467-8659.2005.00872.x},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/K{\"{a}}lberer et al. - 2005 - FreeLence - Coding with free valences.pdf:pdf},
isbn = {01677055},
issn = {14678659},
journal = {Computer Graphics Forum},
number = {3},
pages = {469--478},
title = {{FreeLence - Coding with free valences}},
volume = {24},
year = {2005}
}
@article{Tutte1963,
author = {Tutte, W. T.},
doi = {10.4153/CJM-1963-029-x},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Tutte - 1963 - A census of planar maps.pdf:pdf},
issn = {1496-4279},
journal = {Canadian Journal of Mathematics},
pages = {249--271},
title = {{A census of planar maps}},
url = {https://cms.math.ca/10.4153/CJM-1963-029-x},
volume = {15},
year = {1963}
}
@article{Isenburg2000,
author = {Isenburg, Martin},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg - 2000 - Triangle Fixer Edge-based connectivity compression.pdf:pdf},
journal = {16th European Workshop on Computational Geometry},
pages = {18--23},
title = {{Triangle Fixer: Edge-based connectivity compression}},
url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.33.1012{\&}rep=rep1{\&}type=pdf},
volume = {2},
year = {2000}
}
@article{Karni2000,
abstract = {We show how spectral methods may be applied to 3D mesh data to obtain compact representations. This is achieved by projecting the mesh geometry onto an orthonormal basis derived from the mesh topology. To reduce complexity, the mesh is partitioned into a number of balanced submeshes with minimal interaction, each of which are compressed independently. Our methods may be used for compression and progressive transmission of 3D content, and are shown to be vastly superior to existing methods using spatial techniques, if slight loss can be tolerated.},
author = {Karni, Zachi and Gotsman, Craig},
doi = {10.1145/344779.344924},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Karni, Gotsman - 2000 - Spectral compression of mesh geometry.pdf:pdf},
isbn = {1581132085},
journal = {Proceedings of the 27th annual conference on Computer graphics and interactive techniques - SIGGRAPH '00},
pages = {279--286},
title = {{Spectral compression of mesh geometry}},
url = {http://portal.acm.org/citation.cfm?doid=344779.344924},
year = {2000}
}
@article{Funkhouser2000,
author = {Funkhouser, Thomas},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Funkhouser - 2000 - Review of 3D Object Representations.pdf:pdf},
title = {{Review of 3D Object Representations}},
year = {2000}
}
@article{Rossignac1999,
author = {Rossignac, Jarek and Szymczak, Andrzej},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac, Szymczak - 1999 - Wrap {\&} zip Linear decoding of planar triangle graphs.pdf:pdf},
journal = {Computational Geometry: Theory and Applications},
number = {1-3},
pages = {119--135},
title = {{Wrap {\&} zip: Linear decoding of planar triangle graphs}},
volume = {14},
year = {1999}
}
@article{Unknown2000Tubingen,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Unknown - 2000 - T ubingen.pdf:pdf},
title = {{T{\"u}bingen}},
year = {2000}
}
@article{Vasa2013,
abstract = {Many algorithms have been proposed for the task of efficient compression of triangular meshes. Geometric properties of the input data are usually exploited to obtain an accurate prediction of the data at the decoder. Considerations on how to improve the prediction usually focus on its normal part, assuming that the tangential part behaves similarly. In this paper, we show that knowledge of vertex valences might allow the decoder to form a prediction that is more accurate in the tangential direction, using a weighted parallelogram prediction. This idea can be easily implemented into existing compression algorithms, such as Edgebreaker, and it can be applied at different levels of sophistication, from very simple ones, that are computationally very cheap, to some more complex ones that provide an even better compression efficiency.},
author = {V{\'{a}}{\v{s}}a, Libor and Brunnett, Guido},
doi = {10.1109/TVCG.2013.22},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/V{\'{a}}{\v{s}}a, Brunnett - 2013 - Exploiting connectivity to improve the tangential part of geometry prediction.pdf:pdf},
isbn = {1077-2626},
issn = {10772626},
journal = {IEEE Transactions on Visualization and Computer Graphics},
keywords = {Compression,mesh,parallelogram,prediction,triangle,valence},
number = {9},
pages = {1467--1475},
pmid = {23846092},
title = {{Exploiting connectivity to improve the tangential part of geometry prediction}},
volume = {19},
year = {2013}
}
@article{Isenburg2005,
abstract = {The size of geometric data sets in scientific and industrial applications is constantly increasing. Storing surface or volume meshes in standard uncompressed formats results in large files that are expensive to store and slow to load and transmit. Scientists and engineers often refrain from using mesh compression because currently available schemes modify the mesh data. While connectivity is encoded in a lossless manner, the floating-point coordinates associated with the vertices are quantized onto a uniform integer grid to enable efficient predictive compression. Although a fine enough grid can usually represent the data with sufficient precision, the original floating-point values will change, regardless of grid resolution. In this paper we describe a method for compressing floating-point coordinates with predictive coding in a completely lossless manner. The initial quantization step is omitted and predictions are calculated in floating-point. The predicted and the actual floating-point values are broken up into sign, exponent, and mantissa and their corrections are compressed separately with context-based arithmetic coding. As the quality of the predictions varies with the exponent, we use the exponent to switch between different arithmetic contexts. We report compression results using the popular parallelogram predictor, but our approach will work with any prediction scheme. The achieved bit-rates for lossless floating-point compression nicely complement those resulting from uniformly quantizing with different precisions. {\textcopyright} 2004 Elsevier Ltd. All rights reserved.},
author = {Isenburg, Martin and Lindstrom, Peter and Snoeyink, Jack},
doi = {10.1016/j.cad.2004.09.015},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Lindstrom, Snoeyink - 2005 - Lossless compression of predicted floating-point geometry.pdf:pdf},
isbn = {0010-4485},
issn = {00104485},
journal = {CAD Computer Aided Design},
keywords = {Floating-point,Geometry coding,Lossless,Mesh compression},
number = {8},
pages = {869--877},
title = {{Lossless compression of predicted floating-point geometry}},
volume = {37},
year = {2005}
}
@article{Meshes2012,
author = {B{\ae}rentzen, J. Andreas and Gravesen, Jens and Anton, Fran{\c{c}}ois and Aan{\ae}s, Henrik},
internal-note = {original author field was the Mendeley artifact "Meshes, Polygonal" (chapter title parsed as a name); authors recovered from the book resolved by doi 10.1007/978-1-4471-4075-7 -- verify},
doi = {10.1007/978-1-4471-4075-7},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Meshes - 2012 - Polygonal Meshes 5.pdf:pdf},
isbn = {9781447140757},
pages = {83--97},
title = {{Polygonal Meshes 5}},
year = {2012}
}
@inproceedings{pajarola2000squeeze,
author = {Pajarola, Renato and Rossignac, Jarek},
booktitle = {Computer Graphics International, 2000. Proceedings},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pajarola, Rossignac - 2000 - Squeeze Fast and progressive decompression of triangle meshes.pdf:pdf},
organization = {IEEE},
pages = {173--182},
title = {{Squeeze: Fast and progressive decompression of triangle meshes}},
year = {2000}
}
@article{Rossignac2005,
abstract = {This chapter discusses 3D compression techniques for reducing the delays in transmitting triangle meshes over the Internet. It first explains how vertex coordinates, which represent surface samples, may be compressed through quantization, prediction, and entropy coding. It then describes how the connectivity, which specifies how the surface interpolates these samples, may be compressed by compact encoding of the parameters of a connectivity-graph construction process and by transmission of the vertices in the order in which they are encountered during this process. The storage of triangle meshes compressed with these techniques is usually reduced to about a byte per triangle. When the exact geometry and connectivity of the mesh are not essential, the triangulated surface may be simplified or retiled. Although simplification techniques and the progressive transmission of refinements may be used as a compression tool, the chapter focuses on recently proposed retiling techniques, designed specifically to improve 3D compression. {\textcopyright} 2005 Copyright {\textcopyright} 2005 Elsevier Inc. All rights reserved.},
author = {Rossignac, Jarek},
doi = {10.1016/B978-012387582-2/50020-4},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac - 2005 - 3D mesh compression.pdf:pdf},
isbn = {9780123875822},
journal = {Visualization Handbook},
pages = {359--379},
title = {{3D mesh compression}},
year = {2005}
}
@article{Naturwissenschaften2009,
author = {Naturwissenschaften, Doktors Der},
internal-note = {author field is a Mendeley import artifact -- "Doktors der Naturwissenschaften" is a German degree title, not a person; TODO recover the actual thesis author from the PDF},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Naturwissenschaften - 2009 - Compression of Static and Dynamic Three-Dimensional Meshes.pdf:pdf},
title = {{Compression of Static and Dynamic Three-Dimensional Meshes}},
year = {2009}
}
@article{Hoppe1997,
abstract = {Highly detailed geometric models are rapidly becoming common- place in computer graphics. These models, often represented as complex triangle meshes, challenge rendering performance, trans- mission bandwidth, and storage capacities. This paper introduces the progressive mesh (PM) representation, a newscheme for storing and transmitting arbitrary triangle meshes. This efficient, loss- less, continuous-resolution representation addresses several practi- cal problems in graphics: smooth geomorphing of level-of-detail approximations, progressive transmission, mesh compression, and selective refinement. In addition, we present a new mesh simplification procedure for constructing a PM representation from an arbitrary mesh. The goal of this optimization procedure is to preserve not just the geometry of the original mesh, but more importantly its overall appearance as defined by its discrete and scalar appearance attributes such as material identifiers, color values, normals, and texture coordinates. We demonstrate construction of the PM representation and its applications using several practical models. CR},
author = {Hoppe, Hugues},
doi = {10.1145/258734.258843},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hoppe - 1997 - View-dependent refinement of progressive meshes.pdf:pdf},
isbn = {0897918967},
issn = {00978930},
journal = {Proceedings of the 24th annual conference on Computer graphics and interactive techniques - SIGGRAPH '97},
pages = {189--198},
title = {{View-dependent refinement of progressive meshes}},
url = {http://portal.acm.org/citation.cfm?doid=258734.258843},
year = {1997}
}
@misc{HomeomorphicSurfaces,
keywords = {picture,site},
mendeley-tags = {picture,site},
title = {{Homeomorphic surfaces}},
url = {https://www.open.edu/openlearn/science-maths-technology/mathematics-statistics/surfaces/content-section-2.4{\#}},
urldate = {2018-12-12}
}
@article{Gumhold1999,
author = {Gumhold, Stefan},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gumhold - 1999 - Improved Cut-Border Machine for Triangle Mesh Compression.pdf:pdf},
journal = {Report},
keywords = {cut border machine},
mendeley-tags = {cut border machine},
title = {{Improved Cut-Border Machine for Triangle Mesh Compression}},
year = {1999}
}
@article{Bajaj2001,
abstract = {This paper presents a new 3D RGB image compression scheme designed for interactive real-time applications. In designing our compression method, we have compromised between two important goals: high compression ratio and fast random access ability, and have tried to minimize the overhead caused during run-time reconstruction. Our compression technique is suitable for applications wherein data are accessed in a somewhat unpredictable fashion, and real-time performance of decompression is necessary. The experimental results on three different kinds of 3D images from medical imaging, image-based rendering, and solid texture mapping suggest that the compression method can be used effectively in developing real-time applications that must handle large volume data, made of color samples taken in three- or higher-dimensional space.},
author = {Bajaj, Chandrajit and Ihm, Insung and Park, Sanghun},
doi = {10.1145/383745.383747},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bajaj, Ihm, Park - 2001 - 3D RGB image compression for interactive applications.pdf:pdf},
isbn = {0730-0301},
issn = {07300301},
journal = {ACM Transactions on Graphics},
keywords = {3d texture mapping,3d volume data,data compression,haar wavelets,image-based rendering,interactive real-time applications,medical imaging,random access},
number = {1},
pages = {10--38},
title = {{3D RGB image compression for interactive applications}},
url = {http://portal.acm.org/citation.cfm?doid=383745.383747},
volume = {20},
year = {2001}
}
@misc{Mamou2009,
abstract = {TFAN: A low complexity 3D mesh compression algorithm},
author = {Mamou, Khaled},
keywords = {TFAN,site},
mendeley-tags = {TFAN,site},
title = {{Open 3D Graphics Compression}},
url = {https://github.com/KhronosGroup/glTF/wiki/Open-3D-Graphics-Compression},
urldate = {2018-10-17},
year = {2009}
}
@article{Lewiner2006,
abstract = {Performances of actual mesh compression algorithms vary significantly depending on the type of model it encodes. These methods rely on prior assumptions on the mesh to be efficient, such as regular connectivity, simple topology and similarity between its elements. However, these priors are implicit in usual schemes, harming their suitability for specific, models. In particular, connectivity-driven schemes are difficult to generalize to higher dimensions and to handle topological singularities. GEncode Is a new single-rate, geometry-driven compression scheme where prior knowledge of the mesh is plugged into the coder in an explicit manner. It encodes meshes of arbitrary dimension without topological restrictions, but can incorporate topological properties, such as manifoldness, to improve, the compression ratio. Prior knowledge of the geometry is taken as an input of the algorithm, represented by a function of the local geometry. This suits particularly well for scanned and remeshed models, where exact geometric, priors are available. Compression results surfaces and volumes are competitive with existing schemes. {\textcopyright} 2006 The Eurographics Association and Blackwell Publishing Ltd.},
author = {Lewiner, Thomas and Craizer, Marcos and Lopes, H{\'{e}}lio and Pesco, Sin{\'{e}}sio and Velho, Luiz and Medeiros, Esdras},
doi = {10.1111/j.1467-8659.2006.00990.x},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lewiner et al. - 2006 - GEncode Geometry-driven compression for General Meshes.pdf:pdf},
isbn = {01677055},
issn = {14678659},
journal = {Computer Graphics Forum},
keywords = {Arbitrary dimension,Arbitrary meshes,Geometry-driven techniques,Mesh compression},
number = {4},
pages = {685--695},
title = {{GEncode: Geometry-driven compression for General Meshes}},
volume = {25},
year = {2006}
}
@inproceedings{Schindler1998,
abstract = {Summary form only given. All integer based arithmetic coding consists of two steps: proportional range restriction and range expansion (renormalisation). Here a method is presented that significantly reduces the complexity of renormalisation, allowing a speedup of arithmetic coding by a factor of up to 2. The main idea is to treat the output not as a binary number, but as a base 256 (or other) number. This requires less renormalisation and no bitwise operations},
author = {Schindler, M.},
booktitle = {Proceedings DCC '98 Data Compression Conference (Cat. No.98TB100225)},
doi = {10.1109/DCC.1998.672314},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Schindler - 1998 - A fast renormalisation for arithmetic coding.pdf:pdf},
issn = {1068-0314},
keywords = {Arithmetic,Counting circuits,Decoding,Encoding,Linux,arithmetic codes,arithmetic coding,base 256 number,complexity reduction,fast renormalisation,integer based arithmetic coding,proportional range restriction,range encoder,range expansion,renormalisation},
mendeley-tags = {range encoder},
pages = {572},
title = {{A fast renormalisation for arithmetic coding}},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=672314},
year = {1998}
}
@article{Kronrod2002,
abstract = {Almost all triangle mesh compression algorithms to date are driven by the mesh connectivity code. The geometry code usually employs a straightforward prediction method applied to the vertex sequence as dictated by the connectivity code. This generates a suboptimal geometry code, which results in significant loss in code efficiency, since the geometry dominates the mesh information content. The paper proposes a manifold mesh code which optimizes the geometric component, at the slight expense of the connectivity code. This mesh geometry code is shown to be up to 50{\%} more compact than the state-of-the-art geometry code of Touma and Gotsman (1998), especially for models with non-smooth geometry, such as CAD models.},
author = {Kronrod, B. and Gotsman, C.},
doi = {10.1109/TDPVT.2002.1024124},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kronrod, Gotsman - 2002 - Optimized compression of triangle mesh geometry using prediction trees.pdf:pdf},
isbn = {0769515215},
journal = {Proceedings - 1st International Symposium on 3D Data Processing Visualization and Transmission, 3DPVT 2002},
pages = {602--608},
title = {{Optimized compression of triangle mesh geometry using prediction trees}},
year = {2002}
}
@misc{Isenburg2000a,
abstract = {In this paper we introduce a simple and efficient scheme for encoding the connectivity and the stripification of a triangle mesh. Since generating a good set of triangle strips is a hard problem, it is desirable to do this just once and store the computed strips with the triangle mesh. However, no previously reported mesh encoding scheme is designed to include triangle strip information into the compressed representation. Our algorithm encodes the stripification and the connectivity in an interwoven fashion, that exploits the correlation existing between the two.},
author = {Isenburg, Martin},
keywords = {site},
mendeley-tags = {site},
title = {{Triangle Strip Compression}},
url = {https://www.cs.unc.edu/{~}isenburg/trianglestripcompression/},
urldate = {2018-10-20},
year = {2000}
}
@article{Taubin1999,
abstract = {Java 3D API Specs Deering 1995},
author = {Taubin, Gabriel and Rossignac, Jarek},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1999 - 3D geometry compression(2).pdf:pdf;:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1999 - 3D geometry compression(3).pdf:pdf},
journal = {Course Notes},
keywords = {survey},
mendeley-tags = {survey},
pages = {18--24},
title = {{3D geometry compression}},
url = {http://scholar.google.com/scholar?hl=en{\&}btnG=Search{\&}q=intitle:3D+Geometry+Compression{\#}0},
volume = {21},
year = {1999}
}
@misc{Diaz2009,
abstract = {Lzlib is a data compression library providing in-memory LZMA compression and decompression functions, including integrity checking of the decompressed data.},
author = {Diaz, Antonio Diaz},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Lzlib}},
url = {https://www.nongnu.org/lzip/lzlib.html},
year = {2009}
}
@article{Szymczak2002,
abstract = {We present an algorithm which splits a 3D surface into reliefs, relatively flat regions that have smooth boundaries. The surface is then resampled in a regular manner within each of the reliefs. As a result, we obtain a piecewise regular mesh (PRM) having a regular structure on large regions. Experimental results show that we are able to approximate the input surface with the mean square error of about 0.01-0.02{\%} of the diameter of the bounding box without increasing the number of vertices. We introduce a compression scheme tailored to work with our remeshed models and show that it is able to compress them losslessly (after quantizing the vertex locations) without significantly increasing the approximation error using about 4 bits per vertex of the resampled model.},
author = {Szymczak, Andrzej and Rossignac, Jarek and King, Davis},
doi = {10.1006/gmod.2002.0577},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Szymczak, Rossignac, King - 2002 - Piecewise regular meshes Construction and compression.pdf:pdf},
isbn = {1524-0703},
issn = {15240703},
journal = {Graphical Models},
number = {3-4},
pages = {183--198},
title = {{Piecewise regular meshes: Construction and compression}},
volume = {64},
year = {2002}
}
@article{Bulow2017,
author = {von B{\"u}low, Maximilian Alexander},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/B{\"{u}}low - 2017 - Connectivity and Attribute Compression of Triangle Meshes Bachelor ' s Thesis.pdf:pdf},
number = {March},
note = {Bachelor's thesis},
title = {{Connectivity and Attribute Compression of Triangle Meshes}},
year = {2017}
}
@article{Isenburg2001,
abstract = {We present a simple linear time algorithm for decoding Edgebreaker encoded triangle meshes in a single traversal. The Edgebreaker encoding technique, introduced by Rossignac (1999), encodes the connectivity of triangle meshes homeomorphic to a sphere with a guaranteed 2 bits per triangle or less. The encoding algorithm visits every triangle of the mesh in a depth-first order. The original decoding algorithm recreates the triangles in the same order they have been visited by the encoding algorithm and exhibits a worst case time complexity of O(n2). More recent work (Rossignac and Szymczak, 1999) uses the same traversal order and improves the worst case to O(n). However, for meshes with handles multiple traversals are needed during both encoding and decoding. We introduce here a simpler decoding technique that performs a single traversal and recreates the triangles in reverse order. {\textcopyright} 2001 Elsevier Science B.V.},
author = {Isenburg, Martin and Snoeyink, Jack},
doi = {10.1016/S0925-7721(01)00034-7},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Snoeyink - 2001 - Spirale Reversi Reverse decoding of the Edgebreaker encoding.pdf:pdf},
issn = {09257721},
journal = {Computational Geometry: Theory and Applications},
keywords = {Connectivity compression,Edgebreaker,Linear decoding},
number = {1-2},
pages = {39--52},
title = {{Spirale Reversi: Reverse decoding of the Edgebreaker encoding}},
volume = {20},
year = {2001}
}
@article{PolygonalMeshes2015,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Unknown - 2015 - Polygonal Meshes 3D Object Representations.pdf:pdf},
title = {{Polygonal Meshes 3D Object Representations}},
year = {2015}
}
@article{King2000,
author = {King, Davis and Rossignac, Jarek and Szymczak, Andrzej},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/King, Rossignac, Szmczak - 2000 - Connectivity Compression for Irregular Quadrilateral Meshes Summary of Our Results.pdf:pdf},
journal = {Technology},
pages = {1--21},
title = {{Connectivity Compression for Irregular Quadrilateral Meshes Summary of Our Results}},
year = {2000}
}
@misc{Desbrun2004,
author = {Desbrun, Mathieu},
keywords = {models,site},
mendeley-tags = {models,site},
title = {{The Applied Geometry Lab at Caltech}},
url = {http://www.geometry.caltech.edu/},
year = {2004}
}
@misc{Google2015,
author = {Google},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Brotli}},
url = {https://github.com/google/brotli},
year = {2015}
}
@article{Johansson-evegard2009,
author = {Johansson-eveg{\aa}rd, Erik},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Johansson-eveg{\aa}rd - 2009 - Mesh Data Structures , Lab1.pdf:pdf},
number = {Spring},
pages = {1--7},
title = {{Mesh Data Structures, Lab 1}},
year = {2009}
}
@misc{Collet2015,
author = {Collet, Yann},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Zstandard, zstd}},
url = {http://www.zstd.net},
year = {2015}
}
@misc{Chun2011,
author = {Chun, Won},
keywords = {site},
mendeley-tags = {site},
title = {webgl-loader},
url = {https://code.google.com/archive/p/webgl-loader/},
urldate = {2018-11-20},
year = {2011}
}
@article{Alliez2001,
abstract = {Lossless transmission of 3D meshes is a very challenging and timely problem for many applications, ranging from collaborative design to engineering. Additionally, frequent delays in transmissions call for progressive transmission in order for the end user to receive useful successive refinements of the final mesh. In this paper, we present a novel, fully progressive encoding approach for lossless transmission of triangle meshes with a very fine granularity. A new valence-driven decimating conquest, combined with patch tiling and an original strategic retriangulation is used to maintain the regularity of valence. We demonstrate that this technique leads to good mesh quality, near-optimal connectivity encoding, and therefore a good rate-distortion ratio throughout the transmission. We also improve upon previous lossless geometry encoding by decorrelating the normal and tangential components of the surface. For typical meshes, our method compresses connectivity down to less than 3.7 bits per vertex, 40{\%} better in average than the best methods previously reported [5, 18]; we further reduce the usual geometry bit rates by 20{\%} in average by exploiting the smoothness of meshes. Concretely, our technique can reduce an ascii VRML 3D model down to 1.7{\%} of its size for a 10-bit quantization (2.3{\%} for a 12-bit quantization) while providing a very progressive reconstruction.},
author = {Alliez, Pierre and Desbrun, Mathieu},
doi = {10.1145/383259.383281},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alliez, Desbrun - 2001 - Progressive Compression for Lossless Transmission of Triangle Meshes(2).pdf:pdf},
isbn = {158113374X},
journal = {Proceedings of the 28th annual conference on Computer graphics and interactive techniques},
keywords = {connectivity encoding,geometry encoding,levels of details,mesh decimation,progressive,progressive transmission,triangle mesh compression},
mendeley-tags = {progressive},
pages = {195--202},
title = {{Progressive Compression for Lossless Transmission of Triangle Meshes}},
url = {http://portal.acm.org/citation.cfm?doid=383259.383281},
year = {2001}
}
@article{Gumhold2005,
author = {Gumhold, Stefan},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gumhold - 2005 - Optimizing markov models with applications to triangular connectivity coding.pdf:pdf},
isbn = {0-89871-585-7},
journal = {Proceedings of annual ACM-SIAM Symposium on Discrete algorithms},
pages = {331--338},
title = {{Optimizing markov models with applications to triangular connectivity coding}},
year = {2005}
}
@article{Lee2002,
abstract = {We present Angle-Analyzer, a new single-rate compression algorithm for triangle-quad hybrid meshes. Using a carefully-designed geometry-driven mesh traversal and an efficient encoding of intrinsic mesh properties, Angle-Analyzer produces compression ratios 40{\%} better in connectivity and 20{\%} better in geometry than the leading Touma and Gotsman technique for the same level of geometric distortion. The simplicity and performance of this new technique is demonstrated, and we provide extensive comparative tests to contrast our results with the current state-of-the-art techniques.},
author = {Lee, Haeyoung and Alliez, Pierre and Desbrun, Mathieu},
doi = {10.1111/1467-8659.t01-1-00598},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Alliez, Desbrun - 2002 - Angle-Analyzer A triangle-quad mesh codec.pdf:pdf},
issn = {01677055},
journal = {Computer Graphics Forum},
number = {3},
pages = {383--392},
title = {{Angle-Analyzer: A triangle-quad mesh codec}},
volume = {21},
year = {2002}
}
@article{Deering1995a,
abstract = {This paper introduces the concept of Geometry Compression, al- lowing 3D triangle data to be represented with a factor of 6 to 10 times fewer bits than conventional techniques, with only slight loss- es in object quality. The technique is amenable to rapid decompres- sion in both software and hardware implementations; if 3D render- ing hardware contains a geometry decompression unit, application geometry can be stored in memory in compressed format. Geome- try is first represented as a generalized triangle mesh, a data struc- ture that allows each instance of a vertex in a linear stream to spec- ify an average of two triangles. Then a variable length compression is applied to individual positions, colors, and normals. Delta com- pression followed by a modified Huffman compression is used for positions and colors; a novel table-based approach is used for nor- mals. The table allows any useful normal to be represented by an 18-bit index, many normals can be represented with index deltas of 8 bits or less. Geometry compression is a general space-time trade- off, and offers advantages at every level of the memory/intercon- nect hierarchy: less storage space is needed on disk, less transmis- sion time is needed on networks.},
author = {Deering, Michael},
doi = {10.1145/218380.218391},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Deering - 1995 - Geometry compression.pdf:pdf},
isbn = {0897917014},
issn = {0097-8930},
journal = {Proceedings of the 22nd Annual Conference on Computer Graphics and Interactive Techniques ({SIGGRAPH} '95)},
pages = {13--20},
title = {{Geometry compression}},
url = {http://dl.acm.org/citation.cfm?id=218391},
year = {1995}
}
@article{Mamou2009a,
annote = {NOTE(review): removed an auto-imported abstract and arXiv identifier (1708.05006, a crowd-simulation paper) that belonged to a different work; entry type changed to article since DOI 10.1002/cav.319 is a journal paper in Computer Animation and Virtual Worlds -- confirm remaining fields (isbn, pmid) against the publisher record.},
author = {Mamou, Khaled and Zaharia, Titus and Pr{\^{e}}teux, Fran{\c{c}}oise},
doi = {10.1002/cav.319},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mamou, Zaharia, Pr{\^{e}}teux - 2009 - TFAN A low complexity 3D mesh compression algorithm.pdf:pdf},
journal = {Computer Animation and Virtual Worlds},
isbn = {9781586038229},
issn = {15464261},
keywords = {3D mesh compression,Low complexity,MPEG-4 standard,Real-time decoding},
pmid = {18391302},
title = {{TFAN: A low complexity 3D mesh compression algorithm}},
year = {2009}
}
@article{Aleardi,
author = {Aleardi, Luca Castelli},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Aleardi - Unknown - Lecture 4 Mesh representations and applications.pdf:pdf},
title = {{Lecture 4: Mesh representations and applications}}
}
@article{King1999,
author = {King, Davis and Rossignac, Jarek},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/King, Rossignac - 1999 - Guaranteed 3.67 v bit encoding of planar triangle graphs.pdf:pdf},
journal = {Canadian Conference on Computational Geometry},
keywords = {3d representations,geometry compression,graph encoding,triangle},
pages = {95--98},
title = {{Guaranteed 3.67 v bit encoding of planar triangle graphs}},
url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.70.5143{\&}rep=rep1{\&}type=pdf},
year = {1999}
}
@misc{Osfield2001,
abstract = {The OpenSceneGraph is an open source high performance 3D graphics toolkit, used by application developers in fields such as visual simulation, games, virtual reality, scientific visualization and modelling. Written entirely in Standard C++ and OpenGL it runs on all Windows platforms, OSX, GNU/Linux, IRIX, Solaris, HP-Ux, AIX and FreeBSD operating systems. The OpenSceneGraph is now well established as the world leading scene graph technology, used widely in the vis-sim, space, scientific, oil-gas, games and virtual reality industries.},
author = {Osfield, Robert},
keywords = {site},
mendeley-tags = {site},
title = {{OpenSceneGraph}},
url = {http://www.openscenegraph.org/},
urldate = {2018-10-25},
year = {2001}
}
@article{Gumhold1999a,
author = {Gumhold, Stefan and Guthe, S and Stra{\ss}er, Wolfgang},
doi = {10.1109/VISUAL.1999.809868},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gumhold, Guthe, Stra{\ss}er - 1999 - Tetrahedral Mesh Compression with the cut-border machine.pdf:pdf},
isbn = {0-7803-5897-X},
journal = {Proceedings of IEEE Visualization'99},
keywords = {a vector field or,compression algorithms,even a tensor,function,of a,scalar,scientific vi-,solid modeling,sualization,the function can be,to parameterize the domain,using the barycentric coordinates,volume rendering},
number = {3},
pages = {51--58},
title = {{Tetrahedral Mesh Compression with the cut-border machine}},
volume = {3},
year = {1999}
}
@article{Chou2002,
abstract = {Rendering geometrically detailed 3D models requires the transfer and processing of large amounts of triangle and vertex geometry data. Compressing the geometry bitstream can reduce bandwidth requirements and alleviate transmission bottlenecks. In this paper, we show vector quantization to be an effective compression technique for triangle mesh vertex data. We present predictive vector quantization methods using unstructured codebooks as well as a product code pyramid vector quantizer. The technique is compatible with most existing mesh connectivity encoding schemes and does not require the use of entropy coding. In addition to compression, our vector quantization scheme can be used for complexity reduction by accelerating the computation of linear vertex transformations. Consequently, an encoded set of vertices can be both decoded and transformed in approximately 60 percent of the time required by a conventional method without compression.},
author = {Chou, Peter H. and Meng, Teresa H.},
doi = {10.1109/TVCG.2002.1044522},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chou, Meng - 2002 - Vertex data compression through vector quantization.pdf:pdf},
isbn = {1077-2626},
issn = {10772626},
journal = {IEEE Transactions on Visualization and Computer Graphics},
keywords = {Computer graphics,Data compression,Geometry compression,Vector quantization},
number = {4},
pages = {373--382},
title = {{Vertex data compression through vector quantization}},
volume = {8},
year = {2002}
}
@article{Cs2014,
author = {{Cornell CS} and Marschner, Steve},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cs, Marschner - 2014 - Triangle meshes I(2).pdf:pdf},
title = {{Triangle meshes I}},
year = {2014}
}
@incollection{Alliez2008,
abstract = {Remeshing is a key component of many geometric algorithms, including modeling, editing, animation and simulation. As such, the rapidly developing field of geometry processing has produced a profusion of new remeshing techniques over the past few years. In this paper we survey recent developments in remeshing of surfaces, focusing mainly on graphics applications. We classify the techniques into five categories based on their end goal: structured, compatible, high quality, feature and error-driven remeshing.We limit our description to the main ideas and intuition behind each technique, and a brief comparison between some of the techniques. We also list some open questions and directions for future research.},
author = {Alliez, Pierre and Ucelli, Giuliana and Gotsman, Craig and Attene, Marco},
booktitle = {Mathematics and Visualization},
doi = {10.1007/978-3-540-33265-7_2},
isbn = {978-3-540-33264-0},
issn = {2197666X},
title = {{Recent advances in remeshing of surfaces}},
year = {2008}
}
@article{Jamin2009,
abstract = {The preprocessing of large meshes to provide and optimize interactive visualization implies a complete reorganization that often introduces significant data growth. This is detrimental to storage and network transmission, but in the near future could also affect the efficiency of the visualization process itself, because of the increasing gap between computing times and external access times. In this article, we attempt to reconcile lossless compression and visualization by proposing a data structure that radically reduces the size of the object while supporting a fast interactive navigation based on a viewing distance criterion. In addition to this double capability, this method works out-of-core and can handle meshes containing several hundred million vertices. Furthermore, it presents the advantage of dealing with any n-dimensional simplicial complex, including triangle soups or volumetric meshes, and provides a significant rate-distortion improvement. The performance attained is near state-of-the-art in terms of the compression ratio as well as the visualization frame rates, offering a unique combination that can be useful in numerous applications. {\textcopyright} 2009 Elsevier Ltd. All rights reserved.},
author = {Jamin, Cl{\'{e}}ment and Gandoin, Pierre Marie and Akkouche, Samir},
doi = {10.1016/j.cag.2009.03.029},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jamin, Gandoin, Akkouche - 2009 - CHuMI viewer Compressive huge mesh interactive viewer.pdf:pdf},
issn = {00978493},
journal = {Computers and Graphics (Pergamon)},
keywords = {Interactive visualization,Large meshes,Lossless compression,Out-of-core},
number = {4},
pages = {542--553},
title = {{CHuMI viewer: Compressive huge mesh interactive viewer}},
volume = {33},
year = {2009}
}
@article{Deering,
author = {Deering, Michael},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Deering - Unknown - Geometry Decompression Hardware.pdf:pdf},
keywords = {compressed geometry, geometry compression, hardware},
title = {{Geometry Decompression Hardware}}
}
@misc{Skibinski2015,
abstract = {lzbench is an in-memory benchmark of open-source LZ77/LZSS/LZMA compressors. It joins all compressors into a single exe. At the beginning an input file is read to memory. Then all compressors are used to compress and decompress the file and decompressed file is verified. This approach has a big advantage of using the same compiler with the same optimizations for all compressors. The disadvantage is that it requires source code of each compressor (therefore Slug or lzturbo are not included).},
author = {Skibinski, Przemyslaw},
keywords = {program,site},
mendeley-tags = {program,site},
title = {lzbench},
url = {https://github.com/inikep/lzbench},
year = {2015}
}
@misc{Brettle2017,
abstract = {Draco is a library for compressing and decompressing 3D geometric meshes and point clouds. It is intended to improve the storage and transmission of 3D graphics.
Draco was designed and built for compression efficiency and speed. The code supports compressing points, connectivity information, texture coordinates, color information, normals, and any other generic attributes associated with geometry. With Draco, applications using 3D graphics can be significantly smaller without compromising visual fidelity. For users, this means apps can now be downloaded faster, 3D graphics in the browser can load quicker, and VR and AR scenes can now be transmitted with a fraction of the bandwidth and rendered quickly.},
author = {Brettle, Jamieson and Galligan, Frank},
keywords = {site},
mendeley-tags = {site},
title = {{Draco -- opensource.google.com}},
url = {https://opensource.google.com/projects/draco},
urldate = {2018-10-20},
year = {2017}
}
@article{Evans1996,
abstract = {Almost all scientific visualization involving surfaces is currently done via triangles. The speed at which such triangulated sur- faces can be displayed is crucial to interactive visualization and is bounded by the rate at which triangulated data can be sent to the graphics subsystemfor rendering. Partitioning polygonalmod- els into triangle strips can significantly reduce rendering times over transmitting each triangle individually. In this paper, we present new and efficient algorithms for con- structing triangle strips from partially triangulated models, and ex- perimental results showing these strips are on average ?? {\%} better than those from previous codes. Further, we study the impact of larger buffer sizes and various queuing disciplines on the effective- ness of triangle strips.},
author = {Evans, F. and Skiena, S. and Varshney, A.},
doi = {10.1109/VISUAL.1996.568125},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Evans, Skiena, Varshney - 1996 - Optimizing triangle strips for fast rendering.pdf:pdf},
isbn = {0-89791-864-9},
issn = {0-89791-864-9},
journal = {Proceedings of Seventh Annual IEEE Visualization '96},
pages = {319--326},
title = {{Optimizing triangle strips for fast rendering}},
url = {http://ieeexplore.ieee.org/document/568125/},
year = {1996}
}
@article{Gurung2013,
author = {Gurung, Topraj},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gurung - 2013 - Compact Connectivity Representation for Triangle Meshes Compact Connectivity Representation for.pdf:pdf},
number = {May},
title = {{Compact Connectivity Representation for Triangle Meshes Compact Connectivity Representation for}},
year = {2013}
}
@article{Sim2003,
abstract = {In this paper, we propose an efficient compression algorithm for 3D triangular meshes, consisting of topology data and geometry data. First, the vertex degree warping technique is proposed to compress the topology data losslessly. The proposed algorithm exploits the geometrical information to efficiently encode the topology data, while most conventional algorithms process the topology data and the geometry data independently. Second, the dual parallelogram prediction technique is proposed as an effective geometry prediction scheme. By using forward and backward parallelograms, the proposed algorithm provides smaller prediction errors than the conventional parallelogram prediction scheme. Simulation results on various mesh models demonstrate that the proposed algorithm yields higher compression ratio than the conventional mesh compression algorithms proposed in Touma and Gotsman (Proceedings of the Graphics Interface '98, June 1998, pp. 26-34) and MPEG-4 3D mesh coding (3DMC) standard ISO/IEC 14496-2 (Information Technology - Coding of Audio-Visual Objects - Part 2: Visual, July 2001. The 3DMC reference software is available from http://www.sait.samsung.co.kr/shhc). {\textcopyright} 2002 Elsevier Science B.V. All rights reserved.},
author = {Sim, Jae Young and Kim, Chang Su and Lee, Sang Uk},
doi = {10.1016/S0923-5965(02)00090-5},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sim, Kim, Lee - 2003 - An efficient 3D mesh compression technique based on triangle fan structure.pdf:pdf},
issn = {09235965},
journal = {Signal Processing: Image Communication},
keywords = {3D mesh compression,Dual parallelogram prediction,Vertex degree warping},
number = {1},
pages = {17--32},
title = {{An efficient 3D mesh compression technique based on triangle fan structure}},
volume = {18},
year = {2003}
}
@article{Ibarria2003,
abstract = {We present a simple method for compressing very large and regularly sampled scalar fields. Our method is particularly attractive when the entire data set does not fit in memory and when the sampling rate is high relative to the feature size of the scalar field in all dimensions. Although we report results for R(3) and R(4) data sets, the proposed approach may be applied to higher dimensions. The method is based on the new Lorenzo predictor introduced here, which estimates the value of the scalar field at each sample from the values at processed neighbors. The predicted values are exact when the n-dimensional scalar field is an implicit polynomial of degree n - 1. Surprisingly, when the residuals (differences between the actual and predicted values) are encoded using arithmetic coding, the proposed method often outperforms wavelet compression in an L(infinity) sense. The proposed approach may be used both for lossy and lossless compression and is well suited for out-of-core compression and decompression, because a trivial implementation, which sweeps through the data set reading it once, requires maintaining only a small buffer in core memory, whose size barely exceeds a single (n - 1)-dimensional slice of the data.},
author = {Ibarria, Lawrence and Lindstrom, Peter and Rossignac, Jarek and Szymczak, Andrzej},
doi = {10.1111/1467-8659.00681},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ibarria et al. - 2003 - Out-of-core compression and decompression of large n-dimensional scalar fields.pdf:pdf},
isbn = {0167-7055},
issn = {01677055},
journal = {Computer Graphics Forum},
number = {3},
pages = {343--348},
title = {{Out-of-core compression and decompression of large n-dimensional scalar fields}},
volume = {22},
year = {2003}
}
@article{Gumhold1998,
abstract = {In this paper we introduce a new compressed representation for the connectivity of a triangle mesh. We present local compression and decompression algorithms which are fast enough for real time ap- plications. The achieved space compression rates keep pace with the best rates reported for any known global compression algorithm. These nice properties have great benefits for several important ap- plications. Naturally, the technique can be used to compress triangle meshes without significant delay before they are stored on external devices or transmitted over a network. The presented decompression algorithm is very simple allowing a possible hardware realization of the decompression algorithm which could significantly increase the rendering speed of pipelined graphics hardware.},
author = {Gumhold, Stefan and Stra{\ss}er, Wolfgang},
doi = {10.1145/280814.280836},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gumhold, Stra{\ss}er - 1998 - Real Time Compression of Triangle Mesh Connectivity.pdf:pdf},
isbn = {0897919998},
issn = {00978930},
journal = {SIGGRAPH 98 Proceedings of the 25th annual conference on Computer graphics and interactive techniques},
keywords = {3d graphics hard-,algorithms,cut border machine,mesh compression},
mendeley-tags = {cut border machine},
number = {Annual Conference Series},
pages = {133--140},
title = {{Real Time Compression of Triangle Mesh Connectivity}},
volume = {32},
year = {1998}
}
@inproceedings{Oral2017,
abstract = {3D graphics are evolving media type used in all aspects of technological areas of today. Increase in demand on 3D graphics pushes technological advancements on 3D scan technology and approximation methods to next level which then results in more complex and highly detailed large 3D raw data. Thus, it is crucial to compress these graphics data efficiently. Over the last two decades, many algorithms have been proposed to compress these raw 3D data especially for compact storage, fast transmission, and efficient processing. Compression methods are branching among themselves. In this paper, 3D compression methods are summarized in a taxonomical fashion. A special attention is paid to the main ideas behind the single-rate compression algorithms and their contribution to 3D mesh compression technology. The advantages and the drawbacks of each algorithm are discussed to pave the road for the future 3D compression researchers.},
author = {Oral, Mustafa and Elmas, Ammar Abbas},
booktitle = {2nd International Mediterranean Science and Engineering Congress (IMSEC 2017)},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Oral, Elmas - 2017 - A Brief History of 3D Mesh Compression.pdf:pdf},
keywords = {3D scan,mesh compression,polygonal mesh,single-rate,triangulation},
number = {Imsec},
pages = {136--140},
title = {{A Brief History of 3D Mesh Compression}},
year = {2017}
}
@misc{Mahoney2009,
abstract = {zpaq is a free and open source incremental, journaling command-line archiver},
author = {Mahoney, Matt},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{ZPAQ Incremental Journaling Backup Utility and Archiver}},
url = {http://mattmahoney.net/dc/zpaq.html},
year = {2009}
}
@article{Khodakovsky2002,
abstract = {Encoders for triangle mesh connectivity based on enumeration of vertex valences are among the best reported to date. They are both simple to implement and report the best compressed file sizes for a large corpus of test models. Additionally they have recently been shown to be near-optimal since they realize the Tutte entropy bound for all planar triangulations. In this paper we introduce a connectivity encoding method which extends these ideas to 2-manifold meshes consisting of faces with arbitrary degree. The encoding algorithm exploits duality by applying valence enumeration to both the primal and the dual mesh in a symmetric fashion. It generates two sequences of symbols, vertex valences, and face degrees, and encodes them separately using two context-based arithmetic coders. This allows us to exploit vertex or face regularity if present. When the mesh exhibits perfect face regularity (e.g., a pure triangle or quad mesh) or perfect vertex regularity (valence six or four respectively) the corresponding bit rate vanishes to zero asymptotically. For triangle meshes, our technique is equivalent to earlier valence-driven approaches. We report compression results for a corpus of standard meshes. In all cases we are able to show coding gains.},
author = {Khodakovsky, Andrei and Alliez, Pierre and Desbrun, Mathieu and Schr{\"{o}}der, Peter},
doi = {10.1006/gmod.2002.0575},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Khodakovsky et al. - 2002 - Near-optimal connectivity encoding of 2-manifold polygon meshes.pdf:pdf},
issn = {15240703},
journal = {Graphical Models},
number = {3-4},
pages = {147--168},
title = {{Near-optimal connectivity encoding of 2-manifold polygon meshes}},
volume = {64},
year = {2002}
}
@article{Ws,
author = {Ws, Akcg},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ws - Unknown - Mesh compression ( I ).pdf:pdf},
number = {I},
title = {{Mesh compression ( I )}}
}
@misc{Alliez2001a,
abstract = {This page illustrates the connectivity encoding technique described in "Valence-Driven Connectivity Encoding for 3D Meshes". The application is an implementation of the paper. It opens wrl 97 files containing one 2-manifold triangle mesh, and allows the user to visualize the edge-centered conquest described in the paper. Several visual debugging options are available. Resulting bit-rates are given in bits per vertex. The executable has been compiled for a Win32 platform (Windows 9x/NT/2000) and uses the OpenGL library. Some animations are also available.},
author = {Alliez, Pierre and Desbrun, Mathieu},
keywords = {site},
mendeley-tags = {site},
title = {{Valence-Driven Connectivity Encoding for 3D Meshes}},
url = {http://www.geometry.caltech.edu/SingleRateEncoder/},
urldate = {2018-10-17},
year = {2001}
}
@misc{BasicConceptsTopology,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Unknown - Unknown - Basic Concepts of Topology.pdf:pdf},
title = {{Basic Concepts of Topology}}
}
@article{Isenburg2004,
abstract = {The geometric data sets found in scientific and industrial applications are often very detailed. Storing them using standard uncompressed formats results in large files that are expensive to store and slow to load and transmit. Many efficient mesh compression techniques have been proposed, but scientists and engineers often refrain from using them because they modify the mesh data. While connectivity is encoded in a lossless manner, the floating-point coordinates associated with the vertices are quantized onto a uniform integer grid for efficient predictive compression. Although a fine enough grid can usually represent the data with sufficient precision, the original floatingpoint values will change, regardless of grid resolution. In this paper we describe how to compress floating-point coordinates using predictive coding in a completely lossless manner. The initial quantization step is omitted and predictions are calculated in floating-point. The predicted and the actual floating-point values are then broken up into sign, exponent, and mantissa and their corrections are compressed separately with context-based arithmetic coding. As the quality of the predictions varies with the exponent, we use the exponent to switch between different arithmetic contexts. Although we report compression results using the popular parallelogram predictor, our approach works with any prediction scheme. The achieved bit-rates for lossless floating-point compression nicely complement those resulting from uniformly quantizing with different precisions.},
author = {Isenburg, Martin and Lindstrom, Peter and Snoeyink, Jack},
doi = {10.1080/16864360.2004.10738292},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Lindstrom, Snoeyink - 2004 - Lossless Compression of Floating-Point Geometry.pdf:pdf},
issn = {16864360},
journal = {Computer-Aided Design and Applications},
keywords = {Floating-point,Geometry coding,Lossless,Mesh compression},
number = {1-4},
pages = {495--501},
title = {{Lossless Compression of Floating-Point Geometry}},
volume = {1},
year = {2004}
}
@article{Rossignac1999a,
author = {Rossignac, Jarek and Szymczak, Andrzej},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac, Szymczak - 1999 - Wrap {\&} Zip decompression of the connectivity of triangle meshes.pdf:pdf},
keywords = {06,94,by kbn under,cc,compression,connectivity,corresponding author,e-mail,edu,encoding,gatech,grant 0449,jarek,p3,the national science foundation,this material is based,triangle graphs,triangle mesh,under grant 9721358 and,upon work supported by},
pages = {119--135},
title = {{Wrap {\&} Zip decompression of the connectivity of triangle meshes compressed with Edgebreaker}},
volume = {14},
year = {1999}
}
@inproceedings{Botsch:2006:GMB:1185657.1185839,
address = {New York, NY, USA},
author = {Botsch, Mario and Pauly, Mark and Rossl, Christian and Bischoff, Stephan and Kobbelt, Leif},
booktitle = {ACM SIGGRAPH 2006 Courses},
doi = {10.1145/1185657.1185839},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Botsch et al. - 2006 - Geometric Modeling Based on Triangle Meshes.pdf:pdf},
isbn = {1-59593-364-6},
publisher = {ACM},
series = {SIGGRAPH '06},
title = {{Geometric Modeling Based on Triangle Meshes}},
url = {http://doi.acm.org/10.1145/1185657.1185839},
year = {2006}
}
@article{Isenburg2002,
abstract = {Unstructured hexahedral volume meshes are of particular interest for visualization and simulation applications. They allow regular tiling of the three-dimensional space and show good numerical behaviour in finite element computations. Beside such appealing properties, volume meshes take up huge amounts of space when stored in a raw format. In this paper, we present a technique for encoding the connectivity and geometry of unstructured hexahedral volume meshes. For connectivity compression, we generalize the concept of coding with degrees from the surface to the volume case. In contrast to the connectivity of surface meshes, which can be coded as a sequence of vertex degrees, the connectivity of volume meshes is coded as a sequence of edge degrees. This naturally exploits the regularity of typical hexahedral meshes. We achieve compression rates of around 1.5 bits per hexahedron (bph) that go down to 0.18 bph for regular meshes. On our test meshes the average connectivity compression ratio is 1:162.7. For geometry compression, we perform simple parallelogram prediction on uniformly quantized vertices within the side of a hexahedron. Tests show an average geometry compression ratio of 1:3.7 at a quantization level of 16 bits.},
author = {Isenburg, Martin and Alliez, Pierre},
doi = {10.1109/PCCGA.2002.1167872},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Alliez - 2002 - Compressing hexahedral volume meshes.pdf:pdf},
isbn = {0769517846},
issn = {15504085},
journal = {Proceedings - Pacific Conference on Computer Graphics and Applications},
keywords = {Computational geometry,Computational modeling,Computer graphics,Computer industry,Data visualization,Encoding,Finite element methods,Quantization,Testing,Thermodynamics,isenburg},
mendeley-tags = {isenburg},
pages = {284--293},
title = {{Compressing hexahedral volume meshes}},
volume = {2002-January},
year = {2002}
}
@article{Coors2004,
abstract = {Delphi is a new geometry-guided predictive scheme for compressing the connectivity of triangle meshes. Both compression and decompression algorithms traverse the mesh using the EdgeBreaker state machine. However, instead of encoding the EdgeBreaker clers symbols that capture connectivity explicitly, they estimate the location of the unknown vertex, v, of the next triangle. If the predicted location lies sufficiently close to the nearest vertex, w, on the boundary of the previously traversed portion of the mesh, then Delphi estimates that v coincides with w. When the guess is correct, a single confirmation bit is encoded. Otherwise, additional bits are used to encode the rectification of that prediction. When v coincides with a previously visited vertex that is not adjacent to the parent triangle (EdgeBreaker S case), the offset, which identifies the vertex v, must be encoded, mimicking the cut-border machine compression proposed by Gumhold and Strasser. On models where 97{\%} of Delphi predictions are correct, the connectivity is compressed down to 0.19 bits per triangle. Compression rates decrease with the frequency of wrong predictors, but remains below 1.50 bits per triangle for all models tested.},
author = {Coors, Volker and Rossignac, Jarek},
doi = {10.1007/s00371-004-0255-1},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Coors, Rossignac - 2004 - Delphi Geometry-based connectivity prediction in triangle mesh compression.pdf:pdf},
isbn = {0178-2789},
issn = {01782789},
journal = {Visual Computer},
keywords = {Connectivity coding,Geometric prediction,Geometry compression,Triangle meshes},
number = {8-9},
pages = {507--520},
title = {{Delphi: Geometry-based connectivity prediction in triangle mesh compression}},
volume = {20},
year = {2004}
}
@misc{Geelnard2009,
abstract = {OpenCTM — the Open Compressed Triangle Mesh file format — is a file format, a software library and a tool set for compression of 3D triangle meshes. The geometry is compressed to a fraction of comparable file formats (3DS, STL, VRML, COLLADA...), and the format is easily accessible through a simple, portable API.},
author = {Geelnard, Marcus},
keywords = {site},
mendeley-tags = {site},
title = {{OpenCTM - Compression of 3D triangle meshes}},
url = {http://openctm.sourceforge.net/},
urldate = {2018-10-22},
year = {2009}
}
@article{Storer1982,
author = {Storer, James A and Szymanski, Thomas G},
doi = {10.1145/322344.322346},
journal = {J. ACM},
number = {4},
pages = {928--951},
title = {{Data compression via textual substitution}},
url = {https://www.wikidata.org/entity/Q57479682},
volume = {29},
year = {1982}
}
@article{Shikhare2000,
author = {Shikhare, Dinesh},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Shikhare - 2000 - State of the Art in Geometry Compression.pdf:pdf},
title = {{State of the Art in Geometry Compression}},
year = {2000}
}
@misc{Muravyov2016,
author = {Muravyov, Ilya},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Balz}},
url = {https://sourceforge.net/projects/balz/},
year = {2016}
}
@article{turan1984succinct,
author = {Tur{\'{a}}n, Gy{\"{o}}rgy},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Tur{\'{a}}n - 1984 - On the succinct representation of graphs.pdf:pdf},
journal = {Discrete Applied Mathematics},
number = {3},
pages = {289--294},
publisher = {North-Holland},
title = {{On the succinct representation of graphs}},
volume = {8},
year = {1984}
}
@article{Isenburg2005a,
author = {Isenburg, Martin},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg - 2005 - Compression and Streaming of Polygon Meshes.pdf:pdf},
journal = {Science},
title = {{Compression and Streaming of Polygon Meshes}},
year = {2005}
}
@article{ISENBURG2005,
abstract = {Current mesh compression schemes encode triangles and vertices in an order derived from systematically travers- $\backslash$ning the connectivity graph. These schemes struggle with gigabyte-sized mesh input where the construction and the $\backslash$nusage of the data structures that support topological traversal queries become I/O-inefficient and require large $\backslash$namounts of temporary disk space. Furthermore they expect the entire mesh as input. Since meshes cannot be $\backslash$ncompressed until their generation is complete, they have to be stored at least once in uncompressed form. $\backslash$nWe radically depart from the traditional approach to mesh compression and propose a scheme that incrementally $\backslash$nencodes a mesh in the order it is given to the compressor using only minimal memory resources. This makes $\backslash$nthe compression process essentially transparent to the user and practically independent of the mesh size. This is $\backslash$nespecially beneficial for compressing large meshes, where previous approaches spend significant memory, disk, $\backslash$nand I/O resources on pre-processing, whereas our scheme starts compressing after receiving the first few triangles. $\backslash$n},
author = {Isenburg, Martin and Lindstrom, Peter and Snoeyink, Jack},
doi = {10.1145/1187112.1187276},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/ISENBURG, Martin, LINDSTROM, Peter, et SNOEYINK - 2005 - Streaming Compression of Triangle Meshes.pdf:pdf},
journal = {ACM SIGGRAPH 2005 Sketches. ACM},
pages = {1--8},
title = {{Streaming Compression of Triangle Meshes}},
year = {2005}
}
@article{Chow1997,
abstract = {Most existing visualization applications use 3D geometry as their basic rendering primitive. As users demand more complex data sets, the memory requirements for retrieving and storing large 3D models are becoming excessive. In addition, the current 3D rendering hardware is facing a large memory bus bandwidth bottleneck at the processor to graphics pipeline interface. Rendering 1 million triangles with 24 bytes per triangle at 30 Hz requires as much as 720 MB/sec memory bus bandwidth. This transfer rate is well beyond the current low-cost graphics systems. A solution is to compress the static 3D geometry as an off-line pre-process. Then, only the compressed geometry needs to be stored in main memory and sent down to the graphics pipeline for real-time decompression and rendering. The author presents several new techniques for compression of 3D geometry that produce 2 to 3 times better compression ratios than existing methods. They first introduce several algorithms for the efficient encoding of the original geometry as generalized triangle meshes. This encoding allows most of the mesh vertices to be reused when forming new triangles. Their second contribution allows various parts of a geometric model to be compressed with different precision depending on the level of details present. Together, the meshifying algorithms and the variable compression method achieve compression ratios of 30 and 37 to one over ASCII encoded formats and 10 and 15 to one over binary encoded triangle strips. The experimental results show a dramatically lowered memory bandwidth required for real-time visualization of complex data sets.},
author = {Chow, M M},
doi = {10.1109/VISUAL.1997.663902},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chow - 1997 - Optimized geometry compression for real-time rendering.pdf:pdf},
isbn = {1},
journal = {Visualization '97., Proceedings},
pages = {347--354},
title = {{Optimized geometry compression for real-time rendering}},
year = {1997}
}
@article{Lawrence2008,
author = {Lawrence, Jason},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lawrence - 2008 - Mesh Representation and Decimation.pdf:pdf},
title = {{Mesh Representation and Decimation}},
year = {2008}
}
@misc{Cignoni2005,
author = {Cignoni, Paolo and Ganovelli, Fabio and Ponchio, Federico},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{Visualization and Computer Graphics Lib}},
url = {http://www.vcglib.net/},
year = {2005}
}
@article{Gurung2011,
author = {Gurung, Topraj and Luffel, Mark and Lindstrom, Peter and Rossignac, Jarek},
doi = {10.1145/1964921.1964962},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gurung et al. - 2011 - LR Compact Connectivity Representation for Triangle Meshes.pdf:pdf},
isbn = {9781450309431},
journal = {ACM SIGGRAPH 2011 papers on - SIGGRAPH '11},
keywords = {Hamiltonian cy,data structures,hamiltonian cycle,mesh connectivity,triangle meshes},
mendeley-tags = {data structures},
number = {212},
pages = {1},
title = {{LR: Compact Connectivity Representation for Triangle Meshes}},
url = {http://portal.acm.org/citation.cfm?doid=1964921.1964962},
volume = {1},
year = {2011}
}
@article{Meshing,
author = {Meshing, Csb},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Meshing - Unknown - Triangle Meshes Mesh Compression Two main parts Connectivity Geometry Basic Definitions ( I ) Vertex node V vert.pdf:pdf},
pages = {1--20},
title = {{Triangle Meshes Mesh Compression Two main parts : Connectivity : Geometry : Basic Definitions ( I ) Vertex : node V vertices Edge : between 2 vertices - E edges Face : face between edges - F faces Basic Definitions ( II ) Valence of a vertex : number of e}}
}
@article{Krivograd2008,
abstract = {This paper introduces a new algorithm for the compression of manifold hexahedral meshes topology, using vertex degree. The topology compression consists of two parts-the mesh's boundary consisting of quadrilaterals is compressed first, and then the hexahedra are processed by the help of six commands. The topology compression algorithm has been matched against the best-known method to-date, and shows itself to be competitive. {\textcopyright} 2008 Elsevier Ltd. All rights reserved.},
author = {Krivograd, Sebastian and Trlep, Mladen and {\v{Z}}alik, Borut},
doi = {10.1016/j.cad.2008.10.013},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Krivograd, Trlep, {\v{Z}}alik - 2008 - A hexahedral mesh connectivity compression with vertex degrees.pdf:pdf},
issn = {00104485},
journal = {CAD Computer Aided Design},
keywords = {Application-specific data,Compression,FEM hexahedral mesh,Geometry,Topology},
number = {12},
pages = {1105--1112},
publisher = {Elsevier Ltd},
title = {{A hexahedral mesh connectivity compression with vertex degrees}},
url = {http://dx.doi.org/10.1016/j.cad.2008.10.013},
volume = {40},
year = {2008}
}
@article{Ponchio2015,
abstract = {Hypertension affects one third of adults in the United States and is a major risk factor for heart disease and stroke. A previous report found differences in the prevalence of hypertension among racial/ethnic populations in the United States; blacks had a higher prevalence of hypertension, and Hispanics had the lowest use of antihypertensive medication. Recent variations in geographic differences in hypertension prevalence in the United States are less well known. To assess state-level trends in self-reported hypertension and treatment among U.S. adults, CDC analyzed 2005-2009 data from the Behavioral Risk Factor Surveillance System (BRFSS). The results indicated wide variation among states in the prevalence of self-reported diagnosed hypertension and use of antihypertensive medications. In 2009, the age-adjusted prevalence of self-reported hypertension ranged from 20.9{\%} in Minnesota to 35.9{\%} in Mississippi. The proportion reporting use of antihypertensive medications among those who reported hypertension ranged from 52.3{\%} in California to 74.1{\%} in Tennessee. From 2005 to 2009, nearly all states had an increased prevalence of self-reported hypertension, with percentage-point increases ranging from 0.2 for Virginia (from 26.9{\%} to 27.1{\%}) to 7.0 for Kentucky (from 27.5{\%} to 34.5{\%}). Overall, from 2005 to 2009, the prevalence of self-reported hypertension among U.S. adults increased from 25.8{\%} to 28.3{\%}. Among those reporting hypertension, the proportion using antihypertensive medications increased from 61.1{\%} to 62.6{\%}. Increased knowledge of the differences in self-reported prevalence of hypertension and use of antihypertensive medications by state can help in guiding programs to prevent heart disease, stroke, and other complications of uncontrolled hypertension, including those conducted by state and local public health agencies and health-care providers.},
author = {Ponchio, Federico and Dellepiane, Matteo},
doi = {10.1145/2775292.2775308},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ponchio, Dellepiane - 2015 - Fast decompression for web-based view-dependent 3D rendering.pdf:pdf},
isbn = {9781450336475},
issn = {1545-861X},
journal = {Proceedings of the 20th International Conference on 3D Web Technology - Web3D '15},
keywords = {3d compression,corto,multi-resolution,web visualization},
mendeley-tags = {corto},
pages = {199--207},
title = {{Fast decompression for web-based view-dependent 3D rendering}},
url = {http://dl.acm.org/citation.cfm?doid=2775292.2775308},
year = {2015}
}
@article{Diamanti,
author = {Diamanti, Olga},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Diamanti - Unknown - Geometry Foundations Surface Representations II Conversions Connectivity vertices , edges , triangles.pdf:pdf},
title = {{Geometry Foundations : Surface Representations II Conversions Connectivity : vertices , edges , triangles}}
}
@article{Khodakovsky2004,
author = {Khodakovsky, Andrei and Guskov, Igor},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Khodakovsky, Guskov - 2004 - Compression of Normal Meshes.pdf:pdf},
isbn = {978-3-540-40116-2},
journal = {Geometric Modeling for Scientific Visualization},
pages = {189--206},
title = {{Compression of Normal Meshes}},
year = {2004}
}
@article{Liu2010,
abstract = {In this article, we present an efficient connectivity compression algorithm for triangular meshes. It is a face-based, single resolution and lossless connectivity compression method. This method is an improvement on Edgebreaker. In the aspect of mesh traversing, we use adaptive mesh traversing method to make Split operations as few as possible, which are burdens of the compression ratio. In the aspect of Entropy encoding, a variable code-mode is well designed for every operator in the operator series, which is the result of mesh traversing. Then a binary strand can be obtained. And finally this binary strand is encoded by using adaptive arithmetic coding method. The compression ratio of our algorithm is obtained when all the operators in the series are encoded. In comparison to the previous best face-based encoding methods, our method can significantly improve the compression ratio. {\textcopyright} 2010 IEEE.},
author = {Liu, Ying and Dai, Mingli and Han, Zhongming and Duan, Dagao},
doi = {10.1109/ICACC.2010.5487188},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu et al. - 2010 - An Edgebreaker {\&} code-mode based connectivity compression for triangular meshes.pdf:pdf},
isbn = {9781424458462},
journal = {Proceedings - 2nd IEEE International Conference on Advanced Computer Control, ICACC 2010},
keywords = {Adaptive arithmetic coding,Code-mode,Connectivity compression,Decode,Edgebreaker,Encode,Meshes},
pages = {96--101},
title = {{An Edgebreaker {\&} code-mode based connectivity compression for triangular meshes}},
volume = {2},
year = {2010}
}
@article{Bayazt2007,
author = {Bayaz{\i}t, Ulug and Orcay, Ozgur and Konur, Umut and Gurgen, Fikret S.},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Orcay, Konur, Gurgen - 2007 - Predictive Vector Quantization of 3-D Polygonal Mesh Geometry.pdf:pdf},
journal = {Design},
pages = {1--4},
title = {{Predictive Vector Quantization of 3-D Polygonal Mesh Geometry}},
year = {2007}
}
@inproceedings{Isenburg2000b,
abstract = {Most schemes to compress the topology of a surface mesh have been developed for the lowest common denominator: triangulated meshes. We propose a scheme that handles the topology of arbitrary polygon meshes. It encodes meshes directly in their polygonal representation and extends to capture face groupings in a natural way. Avoiding the triangulation step we reduce the storage costs for typical polygon models that have group structures and property data.},
author = {Isenburg, Martin and Snoeyink, Jack},
booktitle = {Proceedings of the 27th annual conference on Computer graphics and interactive techniques},
doi = {10.1145/344779.344919},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Snoeyink - 2000 - Face Fixer Compressing Polygon Meshes with Properties.pdf:pdf},
isbn = {1581132085},
keywords = {connectivity encoding,mesh compression},
month = jul,
pages = {263--270},
title = {{Face Fixer: Compressing Polygon Meshes with Properties}},
url = {http://dl.acm.org/citation.cfm?id=344919},
year = {2000}
}
@misc{Isenburg2003a,
author = {Isenburg, Martin and Gumhold, Stefan},
keywords = {site},
mendeley-tags = {site},
title = {{Out-of-Core Compression for Gigantic Polygon Meshes}},
url = {http://www.cs.unc.edu/{~}isenburg/oocc/},
year = {2003}
}
@article{scopigno2017delivering,
author = {Scopigno, Roberto and Callieri, Marco and Dellepiane, Matteo and Ponchio, Federico and Potenziani, Marco},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Scopigno et al. - 2017 - Delivering and using 3D models on the web are we ready.pdf:pdf},
journal = {Virtual Archaeology Review},
number = {17},
pages = {1--9},
publisher = {Universitat Polit{\`{e}}cnica de Val{\`{e}}ncia},
title = {{Delivering and using 3D models on the web: are we ready?}},
volume = {8},
year = {2017}
}
@article{Isenburg2002a,
abstract = {We present a generalization of the geometry coder by Touma and Gotsman (1998) to polygon meshes. We let the polygon information dictate where to apply the parallelogram rule that they use to predict vertex positions. Since polygons tend to be fairly planar and fairly convex, it is beneficial to make predictions within a polygon rather than across polygons. This, for example, avoids poor predictions due to a crease angle between polygons. Up to 90 percent of the vertices can be predicted this way. Our strategy improves geometry compression by 10 to 40 percent depending on (a) how polygonal the mesh is and (b) on the quality (planarity/convexity) of the polygons.},
author = {Isenburg, Martin and Alliez, Pierre},
doi = {10.1109/VISUAL.2002.1183768},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Alliez - 2002 - Compressing polygon mesh geometry with parallelogram prediction.pdf:pdf},
isbn = {0-7803-7498-3},
issn = {07135424},
journal = {IEEE Visualization, 2002. VIS 2002.},
keywords = {geometry coding,isenburg,linear prediction,mesh compression,parallelogram rule,polygon,polygon meshes},
mendeley-tags = {isenburg,polygon},
pages = {141--146},
title = {{Compressing polygon mesh geometry with parallelogram prediction}},
year = {2002}
}
@article{Li1998,
author = {Li, Jiankun and Kuo, C.-C. Jay},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li, Kuo - 1998 - A DUAL GRAPH APPROACH TO 3D TRIANGULAR MESH COMPRESSION.pdf:pdf},
isbn = {0818688211},
pages = {1--4},
title = {{A Dual Graph Approach to 3D Triangular Mesh Compression}},
year = {1998}
}
@misc{duda2013asymmetric,
archivePrefix = {arXiv},
author = {Duda, Jarek},
eprint = {1311.2540},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Duda - 2013 - Asymmetric numeral systems entropy coding combining speed of Huffman coding with compression rate of arithmetic coding.pdf:pdf},
howpublished = {arXiv preprint arXiv:1311.2540},
keywords = {draco,zstd},
mendeley-tags = {draco,zstd},
title = {{Asymmetric numeral systems: entropy coding combining speed of Huffman coding with compression rate of arithmetic coding}},
year = {2013}
}
@misc{Muravyov2008,
abstract = {BCM is a high-performance file compressor that utilizes advanced context modeling techniques to achieve a very high compression ratio.},
author = {Muravyov, Ilya},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{BCM}},
url = {https://github.com/encode84/bcm},
urldate = {2018-12-17},
year = {2008}
}
@misc{Bouzidi2013,
abstract = {Compression Benchmark},
author = {Bouzidi, Hamid},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{TurboBench}},
url = {https://github.com/powturbo/TurboBench},
year = {2013}
}
@article{witten1987arithmetic,
author = {Witten, Ian H and Neal, Radford M and Cleary, John G},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sibley et al. - 1987 - Computing Practices Arithmetic Coding for Data Coiupression.pdf:pdf},
journal = {Communications of the ACM},
number = {6},
pages = {520--540},
publisher = {ACM},
title = {{Arithmetic coding for data compression}},
volume = {30},
year = {1987}
}
@article{Bajaj1998,
abstract = {In this paper we introduce a compression scheme suitable for large CAD (Computer Aided Design) models. We assume as input a large model in IGES1 and VRML2 for- mats that encode eciently and transmit to a client for remote display and modeling. To this end we focus on two main issues: (a) space ecient encoding of IGES and VRML les, (b) fragmentation of the compressed model into small independent units that can be transmitted incrementally over unreliable data communication media. We extend previously published compression schemes to handle quadrilateral and general polygonal models still maintaining the layering structure used to organize the redun- dant information of the geometric model. In particular we implement polygon strips to handle general polyhedral meshes and we merge them together so that non-manifold geometry are also allowed. NURBS3 patches are encoded as quadrilaterals with prop- erties attached. The compressed output is a bit-stream partitioned into variable length blocks which can be transmitted independently. With this structure the transmission can be performed on a unreliable network allowing minimal retransmission in case of lost or corrupted blocks.},
author = {Bajaj, Chandrajit L. and Pascucci, Valerio and Zhuang, Guozhong},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bajaj, Pascucci, Zhuang - 1998 - Compression and coding of large cad models.pdf:pdf},
title = {{Compression and coding of large CAD models}},
year = {1998}
}
@article{Mahoney2015,
author = {Mahoney, Matt},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mahoney - 2015 - The ZPAQ Compression Algorithm.pdf:pdf},
isbn = {2959337875},
title = {{The ZPAQ Compression Algorithm}},
year = {2015}
}
@misc{Nguyen,
author = {Nguyen, Thinh},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nguyen - Unknown - Lecture 7 Run-Length, Golomb, and Tunstall Codes.pdf:pdf},
title = {{Lecture 7: Run-Length, Golomb, and Tunstall Codes}}
}
@article{Devillers2004,
abstract = {The compression of geometric structures is a relatively new field of data compression. Since about 1995, several articles have dealt with the coding of meshes, using for most of them the following approach: the vertices of the mesh are coded in an order that partially contains the topology of the mesh. In the same time, some simple rules attempt to predict the position of each vertex from the positions of its neighbors that have been previously coded. We describe a compression algorithm whose principle is completely different: the coding order of the vertices is used to compress their coordinates, and then the topology of the mesh is reconstructed from the vertices. This algorithm achieves compression ratios that are slightly better than those of the currently available algorithms, and moreover, it allows progressive and interactive transmission of the meshes.},
author = {Devillers, O. and Gandoin, P.-M.},
doi = {10.1109/VISUAL.2000.885711},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Devillers, Gandoin - 2004 - Geometric compression for interactive transmission.pdf:pdf},
isbn = {0-7803-6478-3},
journal = {Proceedings Visualization 2000. VIS 2000 (Cat. No.00CH37145)},
keywords = {a fast access to,be dramatically restricted without,coding,compression,especially for remote access,geometry,interactivity,mesh,plies,reconstruction,terrain models,the data,this im-,through low bandwidth lines},
pages = {319--326},
title = {{Geometric compression for interactive transmission}},
url = {http://ieeexplore.ieee.org/document/885711/},
year = {2000}
}
@article{Zhu2013,
author = {Zhu, Junjie},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zhu - 2013 - by.pdf:pdf},
number = {July},
title = {by},
year = {2013}
}
@misc{Isenburg2002b,
abstract = {We offer an easily accessible online Web implementation and a downloadable standalone version of our Polygon Mesh Compressor in pure Java. This software is meant to provide benchmark bit-rates for future research in the area of mesh compression. It compresses not only the connectivity (conn) and geometry (geom) of a polygon mesh, but also one optional layer of texture coordinates. This is accomplished by compressing the texture coordinate mapping (texmap) and the texture coordinate values (texval). Hence, our compressor provides those four benchmark rates: conn, geom, texmap, and texval.},
author = {Isenburg, Martin and Alliez, Pierre and Snoeyink, Jack},
keywords = {site},
mendeley-tags = {site},
title = {{Benchmark Coding for Polygon Mesh Compression and Triangle Mesh Compression}},
url = {http://www.cs.unc.edu/{~}isenburg/pmc/},
urldate = {2018-10-17},
year = {2002}
}
@inproceedings{Cai2009,
author = {Cai, Kangying and Jin, Yu and Wang, Wencheng and Chen, QuQing and Chen, Zhibo and Teng, Jun},
booktitle = {Proceedings of the ACM Symposium on Virtual Reality Software and Technology},
doi = {10.1145/1670252.1670283},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cai et al. - 2009 - Compression of massive models by efficiently exploiting repeated patterns.pdf:pdf},
isbn = {9781605589121},
keywords = {automatic discovery,compression,massive models,repeated pattern},
pages = {229--230},
title = {{Compression of massive models by efficiently exploiting repeated patterns}},
year = {2009}
}
@inproceedings{DeFloriani:2003:SDS:882370.882380,
address = {Aire-la-Ville, Switzerland},
author = {{De Floriani}, Leila and Hui, Annie},
booktitle = {Proceedings of the 2003 Eurographics/ACM SIGGRAPH Symposium on Geometry Processing},
isbn = {1-58113-687-0},
pages = {72--82},
publisher = {Eurographics Association},
series = {SGP '03},
title = {{A Scalable Data Structure for Three-dimensional Non-manifold Objects}},
url = {http://dl.acm.org/citation.cfm?id=882370.882380},
year = {2003}
}
@misc{Puppo,
author = {Puppo, Enrico},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Puppo - Unknown - A surface made of polygonal faces glued at common edges.pdf:pdf},
title = {{A surface made of polygonal faces glued at common edges}}
}
@misc{Rchoetzlein2009,
author = {Rchoetzlein},
keywords = {image},
mendeley-tags = {image},
title = {{Elements of polygonal mesh modeling}},
url = {https://en.wikipedia.org/wiki/Polygon{\_}mesh{\#}/media/File:Mesh{\_}overview.svg},
year = {2009}
}
@article{Cs2014a,
author = {Marschner, Steve},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cs, Marschner - 2014 - Triangle meshes I.pdf:pdf},
title = {{Triangle meshes I}},
year = {2014}
}
@misc{Rossignac1999b,
author = {Rossignac, Jarek},
keywords = {site},
mendeley-tags = {site},
title = {{Rossignac's Edgebreaker 3D Compression for Triangle Meshes}},
url = {https://www.cc.gatech.edu/{~}jarek/edgebreaker/eb/},
urldate = {2018-10-17},
year = {1999}
}
@misc{Ponchio2015a,
abstract = {Corto is a library for compression and decompression meshes and point clouds (C++ and Javascript).},
author = {Ponchio, Federico},
keywords = {site},
mendeley-tags = {site},
title = {{Corto}},
url = {http://vcg.isti.cnr.it/corto/index.html{\#}overview},
urldate = {2018-10-22},
year = {2015}
}
@article{King1999a,
abstract = {To use 3D models on the Internet or in other bandwidth-limited applications, it is often necessary to compress their triangle mesh representations.We consider the problem of balancing two forms of lossy mesh compression: reduction of the number of vertices by simplification, and reduction of the number of bits per vertex coordinate. Let A(V,B) be a trianglemesh approximation for an original modelO. Suppose that A(V,B) has V vertices, each represented using B bits per coordinate. Given a limit F on the file size for A(V,B), what are the optimal values of B and V that minimize the approximation error? Given a desired error boundE, what are optimal B and V ,and how many total bits are needed?We develop answers to these questions by using a shape complexity measure K, which, for any given object approximates the product EV.We give formulae linking B, V, F, E and K,and we explore a simple algorithm for estimating K and the optimal B and V for piecewise spherical approximations of arbitrary triangle meshes.},
author = {King, Davis and Rossignac, Jarek},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/King, Rossignac - 1999 - Optimal bit allocation in 3D compression.pdf:pdf},
journal = {Computational Geometry: Theory and Applications},
pages = {91--118},
title = {{Optimal bit allocation in 3D compression}},
volume = {14},
year = {1999}
}
@inproceedings{Isenburg2005b,
abstract = { Recent years have seen an immense increase in the complexity of geometric data sets. Today's gigabyte-sized polygon models can no longer be completely loaded into the main memory of common desktop PCs. Unfortunately, current mesh formats, which were designed years ago when meshes were orders of magnitudes smaller, do not account for this. Using such formats to store large meshes is inefficient and complicates all subsequent processing. We describe a streaming format for polygon meshes that is simple enough to replace current offline mesh formats and is more suitable for representing large data sets. Furthermore, it is an ideal input and output format for I/O-efficient out-of-core algorithms that process meshes in a streaming, possibly pipelined, fashion. This paper chiefly concerns the underlying theory and the practical aspects of creating and working with this new representation. In particular, we describe desirable qualities for streaming meshes and methods for converting meshes from a traditional to a streaming format. A central theme of this paper is the issue of coherent and compatible layouts of the mesh vertices and polygons. We present metrics and diagrams that characterize the coherence of a mesh layout and suggest appropriate strategies for improving its "streamability". To this end, we outline several out-of-core algorithms for reordering meshes with poor coherence, and present results for a menagerie of well known and generally incoherent surface meshes.},
author = {Isenburg, Martin and Lindstrom, Peter},
booktitle = {Proceedings of the IEEE Visualization Conference},
doi = {10.1109/VIS.2005.94},
isbn = {0780394623},
title = {{Streaming meshes}},
year = {2005}
}
@article{Gurung2013a,
abstract = {We propose Zipper, a compact representation of incidence and adjacency for manifold triangle meshes with fixed connectivity. Zipper uses on average only 6 bits per triangle, can be constructed in linear space and time, and supports all standard random-access and mesh traversal operators in constant time. Similarly to the previously proposed LR (Laced Ring) approach, the Zipper construction reorders vertices and triangles along a nearly Hamiltonian cycle called the ring. The 4.4× storage reduction of Zipper over LR results from three contributions. (1) For most triangles, Zipper stores a 2-bit delta (plus three additional bits) rather than a full 32-bit reference. (2) Zipper modifies the ring to reduce the number of exceptional triangles. (3) Zipper encodes the remaining exceptional triangles using 2.5× less storage. In spite of these large savings in storage, we show that Zipper offers comparable performance to LR and other data structures in mesh processing applications. Zipper may also serve as a compact indexed format for rendering meshes, and hence is valuable even in applications that do not require adjacency information. {\textcopyright} 2012 Elsevier Ltd. All rights reserved.},
author = {Gurung, Topraj and Luffel, Mark and Lindstrom, Peter and Rossignac, Jarek},
doi = {10.1016/j.cad.2012.10.009},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gurung et al. - 2013 - Zipper A compact connectivity data structure for triangle meshes.pdf:pdf},
issn = {00104485},
journal = {Computer-Aided Design},
keywords = {Differential coding,Hamiltonian cycle,Mesh connectivity,Triangle meshes,data structures},
mendeley-tags = {data structures},
number = {2},
pages = {262--269},
publisher = {Elsevier Ltd},
title = {{Zipper: A compact connectivity data structure for triangle meshes}},
url = {http://dx.doi.org/10.1016/j.cad.2012.10.009},
volume = {45},
year = {2013}
}
@article{Campen2012,
author = {Campen, Marcel and Kobbelt, Leif},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Campen, Kobbelt - 2012 - 14.05.2012 Where are we located !.pdf:pdf},
number = {1},
pages = {1--55},
title = {{14.05.2012 Where are we located !?}},
year = {2012}
}
@article{Diaz-Gutierrez2005,
author = {Diaz-Gutierrez, Pablo and Gopi, M. and Pajarola, Renato},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Marks et al. - 2005 - Hierarchyless Simpli cation, Stripi cation and Compression of Triangulated Two-Manifolds.pdf:pdf},
journal = {Computer},
keywords = {hand-and-glove},
mendeley-tags = {hand-and-glove},
number = {3},
title = {{Hierarchyless Simplification, Stripification and Compression of Triangulated Two-Manifolds}},
volume = {24},
year = {2005}
}
@misc{Aleardia,
author = {Aleardi, Luca Castelli},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Aleardi - Unknown - Mesh representations and data structures.pdf:pdf},
title = {{Mesh representations and data structures}}
}
@article{Lu2008,
author = {Lu, Zhe Ming and Li, Zhen},
doi = {10.1007/s11760-008-0053-8},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lu, Li - 2008 - Dynamically restricted codebook-based vector quantisation scheme for mesh geometry compression.pdf:pdf},
issn = {18631703},
journal = {Signal, Image and Video Processing},
keywords = {Computer graphics,Dynamically restricted codebook,Vector quantisation,Vertex data compression},
number = {3},
pages = {251--260},
title = {{Dynamically restricted codebook-based vector quantisation scheme for mesh geometry compression}},
volume = {2},
year = {2008}
}
@misc{UnknownTheoryAlgorithms,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Unknown - Unknown - • Theory and algorithms for efficient.pdf:pdf},
title = {{Theory and algorithms for efficient}}
}
@article{Lee2000,
abstract = {In the field of geometry compression, two main compression targets exist. One is triangle connectivity data and the other is vertex position data. In this paper, we propose a novel algorithm to compress the vertex data. A fundamentally different approach we took in this paper is to transform the vertex positions to the model space, a coordinate system formed by the three previously processed vertices. Once all the vertices are transformed, we found that the result shows a strong tendency to cluster around three points. We exploit such tendency during the vector quantization steps to increase the compression ratio. According to the experiments performed on 12 models, the average compression performance of our algorithm is 6.7 bits/vertex, which is a clear improvement over the previous methods.$\backslash$n},
author = {Lee, Eung Seok and Ko, Hyeong Seok},
doi = {10.1109/PCCGA.2000.883945},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Ko - 2000 - Vertex data compression for triangular meshes.pdf:pdf},
isbn = {0769508685},
issn = {15504085},
journal = {Proceedings - Pacific Conference on Computer Graphics and Applications},
keywords = {Acceleration,Animation,Bandwidth,Clustering algorithms,Computational geometry,Data compression,Graphics,Humans,Vector quantization,Virtual manufacturing},
pages = {225--234},
title = {{Vertex data compression for triangular meshes}},
volume = {2000-Janua},
year = {2000}
}
@inproceedings{Kazhdan2006,
abstract = {Poisson surface reconstruction creates watertight surfaces from oriented point sets. In this work we extend the technique to explicitly incorporate the points as interpolation constraints. The extension can be interpreted as a generalization of the underlying mathematical framework to a screened Poisson equation. In contrast to other image and geometry processing techniques, the screening term is defined over a sparse set of points rather than over the full domain. We show that these sparse constraints can nonetheless be integrated efficiently. Because the modified linear system retains the same finite-element discretization, the sparsity structure is unchanged, and the system can still be solved using a multigrid approach. Moreover we present several algorithmic improvements that together reduce the time complexity of the solver to linear in the number of points, thereby enabling faster, higher-quality surface reconstructions.},
archivePrefix = {arXiv},
arxivId = {1006.4903},
author = {Kazhdan, Michael and Bolitho, Matthew and Hoppe, Hugues},
booktitle = {Proceedings of Eurographics Symposium on Geometry Processing},
doi = {10.1145/1364901.1364904},
eprint = {1006.4903},
isbn = {3905673363},
pmid = {15096211},
title = {{Poisson Surface Reconstruction}},
year = {2006}
}
@article{Li1998a,
abstract = {Based on state-of-the-art graphic-simplification techniques and progressive image-coding schemes, we propose a new hierarchical three-dimensional graphic-compression scheme in this research. This scheme progressively compresses an arbitrary polygonal mesh into a single bitstream. Along the encoding process, every output bit contributes to the reduction of coding distortion, and the contribution of bits decreases according to their order of position in the bitstream. At the receiver end, the decoder can stop at any point while giving a reconstruction of the original model with the best rate-distortion tradeoff. A series of models of continuous varying resolution can thus be constructed from the single bitstream. This property, which is referred to as the embedding property since the coding of a coarser model is embedded in the coding of a finer model, can be widely used in robust error control, progressive transmission and display, level-of-detail control, etc. It is demonstrated by experiments that an acceptable quality level can be achieved at a compression ratio of 20 to 1 for several test graphic models},
author = {Li, Jiankun and {Jay Kuo}, C. C.},
doi = {10.1109/5.687829},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li, Jay Kuo - 1998 - Progressive coding of 3-D graphic models.pdf:pdf},
issn = {00189219},
journal = {Proceedings of the IEEE},
keywords = {Embedded codecs,Graphic coding,Graphic simplification,Progressive coding},
number = {6},
pages = {1052--1063},
title = {{Progressive coding of 3-D graphic models}},
volume = {86},
year = {1998}
}
@inproceedings{Shikhare2001,
author = {Shikhare, Dinesh and Bhakar, Sushil and Mudur, Sudhir P},
booktitle = {Proceedings of the Vision Modeling and Visualization Conference},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Shikhare, Bhakar, Mudur - 2001 - Compression of Large 3D Engineering Models using Automatic Discovery of Repeating Geometric Features.pdf:pdf},
pages = {233--240},
title = {{Compression of Large 3D Engineering Models using Automatic Discovery of Repeating Geometric Features}},
year = {2001}
}
@incollection{Rossignac2005a,
abstract = {This chapter discusses 3D compression techniques for reducing the delays in transmitting triangle meshes over the Internet. It first explains how vertex coordinates, which represent surface samples, may be compressed through quantization, prediction, and entropy coding. It then describes how the connectivity, which specifies how the surface interpolates these samples, may be compressed by compact encoding of the parameters of a connectivity-graph construction process and by transmission of the vertices in the order in which they are encountered during this process. The storage of triangle meshes compressed with these techniques is usually reduced to about a byte per triangle. When the exact geometry and connectivity of the mesh are not essential, the triangulated surface may be simplified or retiled. Although simplification techniques and the progressive transmission of refinements may be used as a compression tool, the chapter focuses on recently proposed retiling techniques, designed specifically to improve 3D compression. {\textcopyright} 2005 Copyright {\textcopyright} 2005 Elsevier Inc. All rights reserved.},
author = {Rossignac, Jarek},
booktitle = {Visualization Handbook},
doi = {10.1016/B978-012387582-2/50020-4},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac - 2005 - 3D mesh compression(2).pdf:pdf},
isbn = {9780123875822},
pages = {359--379},
title = {{3D mesh compression}},
year = {2005}
}
@article{Kronrod2000,
abstract = {We describe an efficient algorithm for coding the connectivity information of general polygon meshes. In contrast to most existing algorithms which are suitable only for triangular meshes, and pay a penalty for treatment of nontriangular faces, this algorithm codes the connectivity information in a direct manner. Our treatment of the special case of triangular meshes is shown to be equivalent to the Edgebreaker algorithm. Using our methods, any triangle mesh may be coded in no more than 2 bits/triangle (approximately 4 bits/vertex), a quadrilateral mesh in no more than 3.5 bits/quad (approximately 3.5 bits/vertex), and the most common case of a quad mesh with few triangles in no more than 4 bits/polygon. {\textcopyright} 2001 Elsevier Science (USA).},
author = {Kronrod, B. and Gotsman, C.},
doi = {10.1109/PCCGA.2000.883946},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kronrod, Gotsman - 2000 - Efficient coding of non-triangular mesh connectivity.pdf:pdf},
isbn = {0769508685},
issn = {15504085},
journal = {Proceedings - Pacific Conference on Computer Graphics and Applications},
keywords = {Algorithm design and analysis,Computer science,Decoding,Surgery,Upper bound},
pages = {235--242},
title = {{Efficient coding of non-triangular mesh connectivity}},
volume = {2000-Janua},
year = {2000}
}
@article{Szymczak2000,
abstract = {Standard representations of irregular finite element meshes combine vertex data (sample coordinates and node values) and connectivity (tetrahedron-vertex incidence). Connectivity specifies how the samples should be interpolated. It may be encoded as four vertex-references for each tetrahedron, which requires 128m bits where m is the number of tetrahedra in the mesh. Our 'Grow {\&} Fold' format reduces the connectivity storage down to 7 bits per tetrahedron: three of these are used to encode the presence of children in a tetrahedron spanning tree; the other four constrain sequences of 'folding' operations, so that they produce the connectivity graph of the original mesh. Additional bits must be used for each handle in the mesh and for each topological 'lock' in the tree. However, as our experiments with a prototype implementation show, the increase of the storage cost due to this extra information is typically no more than 1-2{\%}. By storing vertex data in an order defined by the tree, we avoid the need to store tetrahedron-vertex references and facilitate variable length coding techniques for the vertex data. We provide the details of simple, loss-less compression and decompression algorithms and discuss a way of decreasing the storage cost to about 6 bits per tetrahedron. {\textcopyright} 2000 Elsevier Science Ltd. All rights reserved.},
author = {Szymczak, Andrzej and Rossignac, Jarek},
doi = {10.1016/S0010-4485(00)00040-3},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Szymczak, Rossignac - 2000 - Grow {\&} fold Compressing the connectivity of tetrahedral meshes.pdf:pdf},
issn = {00104485},
journal = {Computer-Aided Design},
keywords = {Coding,Compression,Tetrahedral mesh},
number = {8-9},
pages = {527--537},
title = {{Grow {\&} fold: Compressing the connectivity of tetrahedral meshes}},
volume = {32},
year = {2000}
}
@article{Gotsman2003,
abstract = {We show that the average entropy of the distribution of valences in valence sequences for the class of manifold 3D triangle meshes and the class of manifold 3D polygon meshes is strictly less than the entropy of these classes themselves. This implies that, apart from a valence sequence, another essential piece of information is needed for valence-based connectivity coding of manifold 3D meshes. Since there is no upper bound on the size of this extra piece of information, the result implies that the question of optimality of valence-based connectivity coding is still open.},
author = {Gotsman, Craig},
doi = {10.1111/1467-8659.t01-1-00649},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gotsman - 2003 - On the optimality of valence-based connectivity coding.pdf:pdf},
issn = {01677055},
journal = {Computer Graphics Forum},
keywords = {optimal},
mendeley-tags = {optimal},
number = {1},
pages = {99--102},
title = {{On the optimality of valence-based connectivity coding}},
volume = {22},
year = {2003}
}
@misc{Isenburg2000c,
abstract = {Most schemes to compress the topology of a surface mesh have been developed for the lowest common denominator: triangulated meshes. We propose a scheme that handles the topology of arbitrary polygon meshes. It encodes meshes directly in their polygonal representation and extends to capture face groupings in a natural way. Avoiding the triangulation step we reduce the storage costs for typical polygon models that have group structures and property data.
},
author = {Isenburg, Martin and Snoeyink, Jack},
keywords = {site},
mendeley-tags = {site},
title = {{Face Fixer: Compressing Polygon Meshes with Properties}},
url = {http://www.cs.unc.edu/{~}isenburg/facefixer/},
urldate = {2018-10-20},
year = {2000}
}
@misc{Dubrovina,
author = {Dubrovina, Anastasia and Guibas, Leonidas and Su, Hao},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dubrovina, Guibas, Su - Unknown - Machine Learning for 3D Data Computational Topology and.pdf:pdf},
title = {{Machine Learning for 3D Data Computational Topology and}}
}
@article{Lopes2003,
abstract = {The Edgebreaker is an efficient scheme for compressing triangulated surfaces. A surprisingly simple implementation of Edgebreaker has been proposed for surfaces homeomorphic to a sphere. It uses the corner-table data structure, which represents the connectivity of a triangulated surface by two tables of integers, and encodes them with less than 2 bits per triangle. We extend this simple formulation to deal with triangulated surfaces with handles and present the detailed pseudocode for the encoding and decoding algorithms (which take one page each). We justify the validity of the proposed approach using the mathematical formulation of the handlebody theory for surfaces, which explains the topological changes that occur when two boundary edges of a portion of a surface are identified. {\textcopyright} 2003 Elsevier Ltd. All rights reserved.},
author = {Lopes, H{\'{e}}lio and Rossignac, Jarek and Safonova, Alla and Szymczak, Andrzej and Tavares, Geovan},
doi = {10.1016/S0097-8493(03)00102-X},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lopes et al. - 2003 - Edgebreaker A simple implementation for surfaces with handles.pdf:pdf},
isbn = {0097-8493},
issn = {00978493},
journal = {Computers and Graphics (Pergamon)},
keywords = {Boundary representations,Data compaction and compression,Data storage representations,Graphs and algorithms,Handlebody theory},
number = {4},
pages = {553--567},
title = {{Edgebreaker: A simple implementation for surfaces with handles}},
volume = {27},
year = {2003}
}
@article{Ho2001,
abstract = {Presents an algorithm that uses partitioning and gluing to compress large triangular meshes which are too complex to fit in main memory. The algorithm is based largely on the existing mesh compression algorithms, most of which require an 'in-core' representation of the input mesh. Our solution is to partition the mesh into smaller submeshes and compress these submeshes separately using existing mesh compression techniques. Since a direct partition of the input mesh is out of question, instead we partition a simplified mesh and use the partition on the simplified model to obtain a partition on the original model. In order to recover the full connectivity, we present a simple scheme for encoding/decoding the resulting boundary structure from the mesh partition. When compressing large models with few singular vertices, a negligible portion of the compressed output is devoted to gluing information. On desktop computers, we have run experiments on models with millions of vertices, which could not be compressed using standard compression software packages, and have observed compression ratios as high as 17 to 1 using our technique.},
author = {Ho, J. and Lee, Kuang-Chih and Kriegman, D.},
doi = {10.1109/VISUAL.2001.964532},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ho, Lee, Kriegman - 2001 - Compressing large polygonal models.pdf:pdf},
isbn = {0-7803-7201-8},
journal = {Proceedings Visualization, 2001. VIS '01.},
keywords = {compression algorithms},
title = {{Compressing large polygonal models}},
year = {2001}
}
@article{Jong2005,
author = {Jong, Bin Shyan and Yang, Wen Hao and Tseng, Juin Ling and Lin, Tsong Wuu},
doi = {10.1109/ICIS.2005.29},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jong et al. - 2005 - An efficient connectivity compression for triangular meshes.pdf:pdf},
isbn = {0769522963},
journal = {Proceedings - Fourth Annual ACIS International Conference on Computer and Information Science, ICIS 2005},
keywords = {Adaptive arithmetic coder,Geometry compression,Triangular mesh connectivity},
pages = {583--588},
title = {{An efficient connectivity compression for triangular meshes}},
volume = {2005},
year = {2005}
}
@book{Processing,
author = {Botsch, Mario and Kobbelt, Leif and Pauly, Mark and Alliez, Pierre and L{\'{e}}vy, Bruno},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Processing - Unknown - Botsch M., et al. Polygon mesh processing (AK Peters, 2010)(ISBN 1568814267)(C)(O)(243s){\_}CsCg{\_}.pdf:pdf},
isbn = {9781568814261},
publisher = {A K Peters},
title = {{Polygon Mesh Processing}},
year = {2010}
}
@inproceedings{LocalChapterEvents:ItalChap:ItalianChapConf2008:129-136,
author = {Cignoni, Paolo and Callieri, Marco and Corsini, Massimiliano and Dellepiane, Matteo and Ganovelli, Fabio and Ranzuglia, Guido},
booktitle = {Eurographics Italian Chapter Conference},
doi = {10.2312/LocalChapterEvents/ItalChap/ItalianChapConf2008/129-136},
editor = {Scarano, Vittorio and Chiara, Rosario De and Erra, Ugo},
isbn = {978-3-905673-68-5},
publisher = {The Eurographics Association},
title = {{MeshLab: an Open-Source Mesh Processing Tool}},
year = {2008}
}
@article{Preda2010,
abstract = {The paper is an overview of 3D graphics assets and applications standards.The authors analyzed the three main open standards dealing with three-dimensional (3-D) graphics content and applications, X3D, COLLADA, and MPEG4, to clarify the role of each with respect to the following criteria: ability to describe only the graphics assets in a synthetic 3-D scene or also its behavior as an application, compression capacities, and appropriateness for authoring, transmission, and publishing. COLLADA could become the interchange format for authoring tools; MPEG4 on top of it (as specified in MPEG-4 Part 25), the publishing format for graphics assets; and X3D, the standard for interactive applications, enriched by MPEG-4 compression in the case of online ones. The authors also mentioned that in order to build a mobile application, a developer has to consider different hardware configurations and performances, different operating systems, different screen sizes, and input controls.},
author = {Preda, Marius and Arsov, Ivica and Mor{\'{a}}n, Francisco},
doi = {10.1109/MVT.2009.935544},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Preda, Arsov, Mor{\'{a}}n - 2010 - COLLADA MPEG-4 or X3D MPEG-4 An overview of 3-D graphics assets and applications standards.pdf:pdf},
issn = {15566072},
journal = {IEEE Vehicular Technology Magazine},
title = {{COLLADA + MPEG-4 or X3D + MPEG-4: An overview of 3-D graphics assets and applications standards}},
year = {2010}
}
@article{Taubin1998,
abstract = {The virtual-reality modeling language (VRML) is rapidly becoming$\backslash$nthe standard file format for transmitting three-dimensional (3-D)$\backslash$nvirtual worlds across the Internet. Static and dynamic descriptions of$\backslash$n3-D objects, multimedia content, and a variety of hyperlinks can be$\backslash$nrepresented in VRML files. Both VRML browsers and authoring tools for$\backslash$nthe creations of VRML files are widely available for several different$\backslash$nplatforms. In this paper, we describe the topologically assisted$\backslash$ngeometric compression technology included in our proposal for the VRML$\backslash$ncompressed binary format. This technology produces significant reduction$\backslash$nof file sizes and, subsequently, of the time required for transmission$\backslash$nof such filed across the Internet. Compression ratios of 50:1 or more$\backslash$nare achieved for large models. The proposal also includes a binary$\backslash$nencoding to create compact, rapidly parsable binary VRML files. The$\backslash$nproposal is currently being evaluated by the Compressed Binary Format$\backslash$nWorking Group of the VRML consortium as a possible extension of the VRML$\backslash$nstandard. In the topologically assisted compression scheme, a polyhedron$\backslash$nis represented using two interlocking trees: a spanning tree of vertices$\backslash$nand a spanning tree of triangles. The connectivity information$\backslash$nrepresented in other compact schemes, such as triangular strips and$\backslash$ngeneralized triangular meshes, can be directly derived from this$\backslash$nrepresentation. Connectivity information for large models is compressed$\backslash$nwith storage requirements approaching one bit per triangle. A$\backslash$nvariable-length, optionally lossy compression technique is used for$\backslash$nvertex positions, normals, colors, and texture coordinates. 
The format$\backslash$nsupports all VRML property binding conventions},
author = {Taubin, Gabriel and Horn, William P. and Lazarus, Francis and Rossignac, Jarek},
doi = {10.1109/5.687837},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin et al. - 1998 - Geometry coding and VRML(2).pdf:pdf},
isbn = {0018-9219},
issn = {00189219},
journal = {Proceedings of the IEEE},
keywords = {Algorithms,Compression,Graphics},
number = {6},
pages = {1228--1243},
title = {{Geometry coding and VRML}},
volume = {86},
year = {1998}
}
@article{castellialeardi:inria-00337821,
author = {{Castelli Aleardi}, Luca and Devillers, Olivier and Schaeffer, Gilles},
doi = {10.1016/j.tcs.2008.08.016},
journal = {Theoretical Computer Science},
number = {2-3},
pages = {174--187},
publisher = {Elsevier},
series = {Excursions in Algorithmics: A Collection of Papers in Honor of Franco P. Preparata},
title = {{Succinct representations of planar maps}},
url = {https://hal.inria.fr/inria-00337821},
volume = {408},
year = {2008}
}
@article{Taubin1999a,
author = {Taubin, Gabriel and Rossignac, Jarek},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1999 - 3D geometry compression.pdf:pdf;:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1999 - 3D geometry compression(2).pdf:pdf},
journal = {Course Notes},
keywords = {survey},
mendeley-tags = {survey},
pages = {18--24},
title = {{3D geometry compression}},
url = {http://scholar.google.com/scholar?hl=en{\&}btnG=Search{\&}q=intitle:3D+Geometry+Compression{\#}0},
volume = {21},
year = {1999}
}
@article{Rossignac2010,
author = {Rossignac, Jarek},
doi = {10.1201/9781420035315.ch54},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac - 2010 - Surface simplification and 3D geometry compression.pdf:pdf},
title = {{Surface simplification and 3D geometry compression}},
year = {2010}
}
@article{Hoppe,
author = {Hoppe, H},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hoppe - Unknown - Rendering g Complex Meshes.pdf:pdf},
title = {{Rendering Complex Meshes}}
}
@article{Kettner1999,
abstract = {Software design solutions are presented for combinatorial data structures, such as polyhedral surfaces and planar maps, tailored for program libraries in computational geometry. Design issues considered are flexibility, time and space efficiency, and ease-of-use. We focus on topological aspects of polyhedral surfaces and evaluate edge-based representations with respect to our design goals. A design for polyhedral surfaces in a halfedge data structure is developed following the generic programming paradigm known from the Standard Template Library STL for C++. Connections are shown to planar maps and face-based structures. {\textcopyright} 1999 Elsevier Science B.V. All rights reserved.},
author = {Kettner, Lutz},
doi = {10.1016/S0925-7721(99)00007-3},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kettner - 1999 - Using generic programming for designing a data structure for polyhedral surfaces.pdf:pdf},
isbn = {0-89791-973-4},
issn = {09257721},
journal = {Computational Geometry: Theory and Applications},
keywords = {Combinatorial data structure,Generic programming,Halfedge data structure,Library design,Polyhedral surface},
number = {1},
pages = {65--90},
title = {{Using generic programming for designing a data structure for polyhedral surfaces}},
volume = {13},
year = {1999}
}
@article{Gumhold2000,
author = {Gumhold, Stefan},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gumhold - 2000 - New Bounds on The Encoding of Planar Triangulations.pdf:pdf},
title = {{New Bounds on The Encoding of Planar Triangulations}},
year = {2000}
}
@article{Alliez2001b,
abstract = {In this paper, we propose a valence-driven, single-resolution encoding technique for lossless compression of trian- gle mesh connectivity. Building upon a valence-based approach pioneered by Touma and Gotsman22, we design a new valence-driven conquest for arbitrary meshes that always guarantees smaller compression rates than the original method. Furthermore, we provide a novel theoretical entropy study of our technique, hinting the optimal- ity of the valence-driven approach. Finally, we demonstrate the practical efficiency of this approach (in agreement with the theoretical prediction) on a series of test meshes, resulting in the lowest compression ratios published so far, for both irregular and regular meshes, small or large.},
author = {Alliez, Pierre and Desbrun, Mathieu},
doi = {10.1111/1467-8659.00541},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alliez, Desbrun - 2001 - Valence-Driven Connectivity Encoding for 3D Meshes.pdf:pdf},
issn = {0167-7055},
journal = {Computer Graphics Forum},
number = {3},
pages = {480--489},
title = {{Valence-Driven Connectivity Encoding for 3D Meshes}},
url = {http://doi.wiley.com/10.1111/1467-8659.00541},
volume = {20},
year = {2001}
}
@article{Lindstrom2008,
author = {Lindstrom, Peter and Isenburg, Martin},
doi = {10.1109/DCC.2008.12},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lindstrom, Isenburg - 2008 - Lossless Compression of Hexahedral Meshes.pdf:pdf},
isbn = {978-0-7695-3121-2},
issn = {1068-0314},
journal = {Proceedings of the Data Compression Conference},
pages = {192--201},
title = {{Lossless Compression of Hexahedral Meshes}},
year = {2008}
}
@article{Mammou,
author = {Mammou, Khaled},
doi = {10.1016/B978-0-12-387582-2.50020-4},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mammou - Unknown - 3D Mesh Compression in Open3DGC.pdf:pdf},
journal = {The Visualization Handbook},
pages = {1--15},
title = {{3D Mesh Compression in Open3DGC}},
url = {http://dx.doi.org/10.1016/B978-0-12-387582-2.50020-4}
}
@article{Luffel2014,
abstract = {We present Grouper: an all-in-one compact file format, random-access data structure, and streamable representation for large triangle meshes. Similarly to the recently published SQuad representation, Grouper represents the geometry and connectivity of a mesh by grouping vertices and triangles into fixed-size records, most of which store two adjacent triangles and a shared vertex. Unlike SQuad, however, Grouper interleaves geometry with connectivity and uses a new connectivity representation to ensure that vertices and triangles can be stored in a coherent order that enables memory-efficient sequential stream processing. We present a linear-time construction algorithm that allows streaming out Grouper meshes using a small memory footprint while preserving the initial ordering of vertices. As a part of this construction, we show how the problem of assigning vertices and triangles to groups reduces to a well-known NP-hard optimization problem, and present a simple yet effective heuristic solution that performs well in practice. Our array-based Grouper representation also doubles as a triangle mesh data structure that allows direct access to vertices and triangles. Storing only about two integer references per trianglea?i.e., less than the three vertex references stored with each triangle in a conventional indexed mesh format-Grouper answers both incidence and adjacency queries in amortized constant time. Our compact representation enables data-parallel processing on multicore computers, instant partitioning and fast transmission for distributed processing, as well as efficient out-of-core access. We demonstrate the versatility and performance benefits of Grouper using a suite of example meshes and processing kernels.},
author = {Luffel, Mark and Gurung, Topraj and Lindstrom, Peter and Rossignac, Jarek},
doi = {10.1109/TVCG.2013.81},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Luffel et al. - 2014 - Grouper A compact, streamable triangle mesh data structure.pdf:pdf},
isbn = {1077-2626},
issn = {10772626},
journal = {IEEE Transactions on Visualization and Computer Graphics},
keywords = {Mesh compression,data structures,large meshes,mesh data structures,out-of-core algorithms,random access},
mendeley-tags = {data structures},
number = {1},
pages = {84--98},
pmid = {24201328},
title = {{Grouper: A compact, streamable triangle mesh data structure}},
volume = {20},
year = {2014}
}
@article{Rossignac1999c,
abstract = {Edgebreaker is a simple scheme for compressing the triangle/vertex$\backslash$nincidence graphs (sometimes called connectivity or topology) of$\backslash$nthree-dimensional triangle meshes. Edgebreaker improves upon the storage$\backslash$nrequired by previously reported schemes, most of which can guarantee$\backslash$nonly an O(t log(t)) storage cost for the incidence graph of a mesh of t$\backslash$ntriangles. Edgebreaker requires at most 2t bits for any mesh$\backslash$nhomeomorphic to a sphere and supports fully general meshes by using$\backslash$nadditional storage per handle and hole. For large meshes, entropy coding$\backslash$nyields less than 1.5 bits per triangle. Edgebreaker's compression and$\backslash$ndecompression processes perform identical traversals of the mesh from$\backslash$none triangle to an adjacent one. At each stage, compression produces an$\backslash$nop-code describing the topological relation between the current triangle$\backslash$nand the boundary of the remaining part of the mesh. Decompression uses$\backslash$nthese op-codes to reconstruct the entire incidence graph. Because$\backslash$nEdgebreaker's compression and decompression are independent of the$\backslash$nvertex locations, they may be combined with a variety of$\backslash$nvertex-compressing techniques that exploit topological information about$\backslash$nthe mesh to better estimate vertex locations. Edgebreaker may be used to$\backslash$ncompress the connectivity of an entire mesh bounding a 3D polyhedron or$\backslash$nthe connectivity of a triangulated surface patch whose boundary need not$\backslash$nbe encoded. The paper also offers a comparative survey of the rapidly$\backslash$ngrowing field of geometric compression},
author = {Rossignac, Jarek},
doi = {10.1109/2945.764870},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac - 1999 - Edgebreaker Connectivity compression for triangle meshes.pdf:pdf},
isbn = {1077-2626},
issn = {10772626},
journal = {IEEE Transactions on Visualization and Computer Graphics},
number = {1},
pages = {47--61},
title = {{Edgebreaker: Connectivity compression for triangle meshes}},
volume = {5},
year = {1999}
}
@article{Jovanova2008,
abstract = {This paper introduces a new model for 3D graphics compression, recently adopted by MPEG community in the Part 25 of the MPEG-4 standard. The model makes possible to use MPEG- 4 compression on top of a third party XML description, facilitating its deployment. Now, the compression and transmission layer, ensured by MPEG-4, is better integrated with the production to consumption chain. The paper reports and analyzes the compression results when using the proposed model.},
author = {Jovanova, Blagica and Preda, Marius and Preteux, Fran{\c{c}}oise},
doi = {10.1109/3DTV.2008.4547818},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jovanova, Preda, Preteux - 2008 - MPEG-4 part 25 A generic model for 3D graphics compression.pdf:pdf},
isbn = {9781424417551},
journal = {2008 3DTV-Conference: The True Vision - Capture, Transmission and Display of 3D Video, 3DTV-CON 2008 Proceedings},
keywords = {3D graphics,Animation,COLLADA,Compression,MPEG-4,Standards,XML},
number = {January},
pages = {101--104},
title = {{MPEG-4 part 25: A generic model for 3D graphics compression}},
year = {2008}
}
@article{Isenburg2002c,
author = {Isenburg, Martin},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg - 2002 - Compressing polygon mesh connectivity with degree duality prediction.pdf:pdf},
issn = {07135424},
journal = {Graphics Interface},
keywords = {compression,connectivity coding,degree duality,graph coding,mesh,non-manifold meshes,polygon},
mendeley-tags = {polygon},
pages = {161--170},
title = {{Compressing polygon mesh connectivity with degree duality prediction}},
url = {http://pdf.aminer.org/000/235/875/compressing{\_}polygon{\_}mesh{\_}connectivity{\_}with{\_}degree{\_}duality{\_}prediction.pdf},
year = {2002}
}
@misc{Turk2000,
author = {Turk, Greg and Mullins, Brandon},
keywords = {models,site},
mendeley-tags = {models,site},
title = {{Large Geometric Models Archive}},
url = {https://www.cc.gatech.edu/projects/large{\_}models/},
urldate = {2018-12-13},
year = {2000}
}
@inproceedings{Buelow2017,
abstract = {Polygonal meshes are used in various fields ranging from CAD to gaming and web based applications. Reducing the size required for storing and transmitting these meshes by taking advantage of redundancies is an important aspect in all of these cases. In this paper, we present a connectivity based compression approach that predicts attributes and stores differences to the predictions together with minimal connectivity information. It is an extension to the Cut-Border Machine and applicable to arbitrary manifold and non-manifold polygonal meshes containing multiple attributes of different types. It compresses both the connectivity and attributes without loss outside ofre-ordering vertices and polygons. In addition, an optional quantization step can be used to further reduce the data ifa certain loss ofaccuracy is acceptable. Our method outperforms state-of-the-art compression techniques, including specialized triangle mesh compression approaches when applicable. Typical compression rates for our approach range from 2:1 to 6:1 for lossless compression and up to 25:1 when quantizing to 14 bit accuracy.},
author = {von Buelow, Max and Guthe, Stefan and Goesele, Michael},
booktitle = {Eurographics Proceedings},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Buelow, Guthe, Goesele - 2017 - Compression of Non-Manifold Polygonal Meshes Revisited.pdf:pdf},
title = {{Compression of Non-Manifold Polygonal Meshes Revisited}},
url = {https://www.gcc.tu-darmstadt.de/home/proj/meshcomp/ https://github.com/magcks/harry},
year = {2017}
}
@article{Botsch2002,
abstract = {We describe the implementation of a half-edge data structure for the static representation and dynamic handling of arbitrary polygonal meshes. The particular design of the data structures and classes aims at maximum flexibility and high performance. We achieve this by using generative programming concepts which allow the compiler to resolve most of the special case handling decisions at compile time. We evaluate our data structure based on prototypic implementations of mesh processing applications such as decimation and smoothing.},
author = {Botsch, Mario and Steinberg, S. and Bischoff, S. and Kobbelt, L.},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Botsch et al. - 2002 - OpenMesh - a generic and efficient polygon mesh data structure.pdf:pdf},
journal = {OpenSG Symposium},
title = {{OpenMesh - a generic and efficient polygon mesh data structure}},
year = {2002}
}
@article{Attene2003,
abstract = {We focus on the lossy compression of manifold triangle meshes. Our SwingWrapper approach partitions the surface of an original mesh M into simply connected regions, called triangloids. From these, we generate a new mesh M'. Each triangle of M' is an approximation of a triangloid of M. By construction, the connectivity of M' is fairly regular and can be compressed to less than a bit per triangle using EdgeBreaker or one of the other recently developed schemes. The locations of the vertices of M' are compactly encoded with our new prediction technique, which uses a single correction parameter per vertex. SwingWrapper strives to reach a user-defined output file size rather than to guarantee a given error bound. For a variety of popular models, a rate of 0.4 bits/triangle yields an L-2 distortion of about 0.01{\%} of the bounding box diagonal. The proposed solution may also be used to encode crude meshes for adaptive transmission or for controlling subdivision surfaces.},
author = {Attene, M and Falcidieno, B and Spagnuolo, M and Rossignac, Jarek},
doi = {10.1145/944020.944022},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Attene et al. - 2003 - SwingWrapper Retiling triangle meshes for better EdgeBreaker compression.pdf:pdf},
isbn = {0730-0301},
issn = {07300301},
journal = {Acm Transactions on Graphics},
keywords = {algorithms,connectivity,error,geometry compression,remeshing,retiling,simplification,triangle mesh},
number = {4},
pages = {982--996},
title = {{SwingWrapper: Retiling triangle meshes for better EdgeBreaker compression}},
volume = {22},
year = {2003}
}
@article{Touma1998,
abstract = {A novel algorithm for the encoding of orientable manifold triangle mesh geometry is presented. Mesh connectivity is encoded in a lossless manner. Vertex coordinate data is uniformly quantized and then losslessly encoded. The compression ratios achieved by the algorithm are shown to be significantly better than those of currently available algorithms for both connectivity and coordinate data. Use of our algorithm may lead to significant reduction of bandwidth required for the transmission of VRML files over the Internet.},
author = {Touma, Costa and Gotsman, Craig},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Touma, Gotsman - 1998 - Triangle Mesh Compression.pdf:pdf},
isbn = {1581132085},
issn = {07135424},
journal = {Graphics Interface},
keywords = {Coding,Compression,Triangle mesh,VRML},
title = {{Triangle mesh compression}},
year = {1998}
}
@inproceedings{baumgart1975polyhedron,
author = {Baumgart, Bruce G},
booktitle = {Proceedings of the May 19-22, 1975, national computer conference and exposition},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Baumgart - 1975 - A polyhedron representation for computer vision.pdf:pdf},
organization = {ACM},
pages = {589--596},
title = {{A polyhedron representation for computer vision}},
year = {1975}
}
@article{Gurung2011a,
abstract = {The SQuad data structure represents the connectivity of a triangle mesh by its "S table" of about 2 rpt (integer references per triangle). Yet it allows for a simple implementation of expected constant-time, random-access operators for traversing the mesh, including in-order traversal of the triangles incident upon a vertex. SQuad is more compact than the Corner Table (CT), which stores 6 rpt, and than the recently proposed SOT, which stores 3 rpt. However, in-core access is generally faster in CT than in SQuad, and SQuad requires rebuilding the S table if the connectivity is altered. The storage reduction and memory coherence opportunities it offers may help to reduce the frequency of page faults and cache misses when accessing elements of a mesh that does not fit in memory. We provide the details of a simple algorithm that builds the S table and of an optimized implementation of the SQuad operators. [PUBLICATION ABSTRACT]},
author = {Gurung, Topraj and Laney, Daniel and Lindstrom, Peter and Rossignac, Jarek},
doi = {10.1111/j.1467-8659.2011.01866.x},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gurung et al. - 2011 - SQuad Compact representation for triangle meshes.pdf:pdf},
isbn = {01677055},
issn = {14678659},
journal = {Computer Graphics Forum},
keywords = {data structures},
mendeley-tags = {data structures},
number = {2},
pages = {355--364},
title = {{SQuad: Compact representation for triangle meshes}},
volume = {30},
year = {2011}
}
@article{Meng2010,
abstract = {The transmission and storage of large amounts of triangle and vertex geometry data are required for rendering geometrically detailed 3D meshes. This paper proposes a novel vertex encoding algorithm using VQ with a region growing based k-ring prediction scheme. During the encoding process, the parameter k at the vertex to be encoded is estimated by the error vectors of the preceding encoded vertices, and then the adaptive prediction rule of the vertex is computed according to k. Experiment results show that, compared with the vertex encoding algorithm based on the conventional parallelogram prediction, the proposed algorithm achieves a higher encoding quality at the same bit rate.},
author = {Meng, Shaoliang and Wang, Aili and Li, Shengming},
doi = {10.1109/ISSCAA.2010.5632292},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Meng, Wang, Li - 2010 - Compression of 3D triangle meshes based on predictive vector quantization.pdf:pdf},
isbn = {9781424460441},
journal = {ISSCAA2010 - 3rd International Symposium on Systems and Control in Aeronautics and Astronautics},
keywords = {Signal, image and data processing},
pages = {1403--1406},
title = {{Compression of 3D triangle meshes based on predictive vector quantization}},
year = {2010}
}
@article{mcguire2000half,
author = {McGuire, Max},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/McGuire - 2000 - The half-edge data structure.pdf:pdf},
keywords = {site},
mendeley-tags = {site},
title = {{The half-edge data structure}},
urldate = {2018-11-07},
year = {2000}
}
@article{Taubin1998a,
abstract = {The abundance and importance of complex 3-D data bases in major industry segments, the affordability of interactive 3-D rendering for office and consumer use, and the exploitation of the Internet to distribute and share 3-D data have intensified the need for an effective 3-D geometric compression technique that would significantly reduce the time required to transmit 3-D models over digital communication channels, and the amount of memory or disk space required to store the models. Because the prevalent representation of 3-D models for graphics purposes is polyhedral and because polyhedral models are in general triangulated for rendering, this article introduces a new compressed representation for complex triangulated models and simple, yet efficient, compression and decompression algorithms. In this scheme, vertex positions are quantized within the desired accuracy, a vertex spanning tree is used to predict the position of each vertex from 2, 3, or 4 of its ancestors in the tree, and the correction vectors are entropy encoded. Properties, such as normals, colors, and texture coordinates, are compressed in a similar manner. The connectivity is encoded with no loss of information to an average of less than two bits per triangle. The vertex spanning tree and a small-set of jump edges are used to split the model into a simple polygon. A triangle spanning tree and a sequence of marching bits are used to encode the triangulation of the polygon. Our approach improves on Michael Deering's pioneering results by exploiting the geometric coherence of several ancestors in the vertex spanning tree, preserving the connectivity with no loss of information, avoiding vertex repetitions, and using about three times fewer bits for the connectivity. However, since decompression requires random access to all vertices, this method must be modified for hardware rendering with limited onboard memory. 
Finally, we demonstrate implementation results for a variety of VRML models with up to two orders of magnitude compression.},
author = {Taubin, Gabriel and Rossignac, Jarek},
doi = {10.1145/274363.274365},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1998 - Geometric compression through topological surgery.pdf:pdf;:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taubin, Rossignac - 1998 - Geometric compression through topological surgery(2).pdf:pdf},
issn = {07300301},
journal = {ACM Transactions on Graphics},
number = {2},
pages = {84--115},
title = {{Geometric compression through topological surgery}},
url = {http://portal.acm.org/citation.cfm?doid=274363.274365},
volume = {17},
year = {1998}
}
@inproceedings{Isenburg2001a,
abstract = {We describe a method to visualize the connectivity graph of a mesh using a natural embedding in 3D space. This uses a 3D shape representation that is based solely on mesh connectivity: the connectivity shape. Given a connectivity, we define its natural geometry as a smooth embedding in space with uniform edge lengths and describe efficient techniques to compute it. Our main contribution is to demonstrate that a surprising amount of geometric information is implicit in the connectivity. We also show how to generate connectivity shapes that approximate given 3D shapes. Potential applications of connectivity shapes to modeling and mesh coding are described.},
author = {Isenburg, Martin and Gumhold, Stefan and Gotsman, C.},
booktitle = {Proceedings Visualization, 2001. VIS '01.},
doi = {10.1109/VISUAL.2001.964504},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Gumhold, Gotsman - 2001 - Connectivity shapes.pdf:pdf},
isbn = {0-7803-7201-8},
keywords = {implicit ge-,mesh connectivity,natural embedding,ometry,polygon meshes,shape compression},
pages = {1--8},
title = {{Connectivity shapes}},
year = {2001}
}
@misc{Geldreich2009,
author = {Geldreich, Richard},
keywords = {program,site},
mendeley-tags = {program,site},
title = {{LZHAM}},
url = {https://github.com/richgel999/lzham{\_}codec},
year = {2009}
}
@phdthesis{tunstall1967synthesis,
  author = {Tunstall, Brian Parker},
  title  = {{Synthesis of noiseless compression codes}},
  school = {Georgia Institute of Technology},
  year   = {1967}
}
@inproceedings{Oral2017a,
abstract = {Self-Organizing Maps (SOM), a type of Artificial Neural Network (ANN), is a data clustering tool that provides a way of representing multi-dimensional data in two-dimensional space. The maps are produced preserving topological relations between parameters of the input vectors. Unlike multi-layered feed forward neural networks, SOM employs unsupervised learning training mechanism. Interestingly, it requires no prior knowledge regarding the solution. The variety of the applications which employ SOM for data analysis reported in the literature is a clear indication of its acceptance as a powerful data analysis tool. SOM may, not only, present better viewing opportunities in such cases that displaying the relationships between the factors effecting the problem is impossible, but also, provides better exploration of the data. Application of SOM on the data collected in fisheries science provided enhanced outcomes and better understanding on the data collected. In this paper, SOM is discussed and reviewed in view of aquaculture and fisheries research based on the prevalence of isopods in the buccal cavity of one grouper species. The research was carried out to determine the seasonal patterns and potential impacts of the parasites on the goldblotch grouper using the SOM which were conducted in Iskenderun Bay.},
address = {{\.I}skenderun},
author = {Oral, Mustafa and Elmas, A Abbas and Gen{\c{c}}, M Ayce and Kaya, Do{\u{g}}ukan and Gen{\c{c}}, Erc{\"{u}}ment},
booktitle = {International {\.I}skenderun Bay Symposium},
editor = {{\"{O}}zcan, Tahir and Y{\"{u}}cel, Nebil and {\"{O}}zcan, G{\"{u}}lnaz},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Oral et al. - 2017 - AN ARTIFICIAL INTELLIGENCE BASED FISHERIES RESEARCH ON THE EVALUATION OF GNATHIID PARASITISM IN GOLDBLOTCH GROUPER.pdf:pdf},
keywords = {Artificial Neural Network,Iskenderun Bay,Self-organizing maps,fish parasites},
month = oct,
title = {{An Artificial Intelligence Based Fisheries Research on the Evaluation of Gnathiid Parasitism in Goldblotch Grouper of {\.I}skenderun Bay}},
year = {2017}
}
@misc{Nokia2016,
author = {Duda, Jarek},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nokia - 2016 - Old and new coding techniques for data compression , error correction and nonstandard situations Lecture I data compress.pdf:pdf},
internal-note = {Imported author was the garbled "Nokia, Duda" (lecture apparently given at Nokia by J. Duda); verify author and venue against the slides before publishing},
title = {{Old and new coding techniques for data compression, error correction and nonstandard situations. Lecture I: data compression {\ldots} data encoding}},
year = {2016}
}
@article{rossignac1998just,
  author  = {Rossignac, Jarek},
  title   = {{Just-in-time upgrades for triangle meshes}},
  journal = {3D Geometry Compression, Course 21, SIGGRAPH'98},
  pages   = {18--24},
  year    = {1998},
  file    = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rossignac - 1998 - Just-in-time upgrades for triangle meshes.pdf:pdf}
}
@article{Kallmann2001,
author = {Kallmann, Marcelo and Thalmann, Daniel},
doi = {10.1080/10867651.2001.10487533},
issn = {1086-7651},
journal = {Journal of Graphics Tools},
month = jan,
number = {1},
pages = {7--18},
publisher = {Taylor {\&} Francis},
title = {{Star-Vertices: A Compact Representation for Planar Meshes with Adjacency Information}},
volume = {6},
year = {2001}
}
@inproceedings{Isenburg2005c,
abstract = {The parallelogram rule is a simple, yet effective scheme to predict the position of a vertex from a neighboring triangle. It was introduced by Touma and Gotsman [1998] to compress the vertex positions of triangular meshes. Later, Isenburg and Alliez [2002] showed that this rule is especially efficient for quad-dominant polygon meshes when applied "within" rather than across polygons. However, for hexagon-dominant meshes the parallelogram rule systematically performs miss-predictions.In this paper we present a generalization of the parallelogram rule to higher degree polygons. We compute a Fourier decomposition for polygons of different degrees and assume the highest frequencies to be zero for predicting missing points around the polygon. In retrospect, this theory also validates the parallelogram rule for quadrilateral surface mesh elements, as well as the Lorenzo predictor [Ibarria et al. 2003] for hexahedral volume mesh elements.},
author = {Isenburg, Martin and Ivrissimtzis, Ioannis and Gumhold, Stefan and Seidel, Hans-Peter},
booktitle = {Proceedings of the 21st spring conference on Computer graphics - SCCG '05},
doi = {10.1145/1090122.1090146},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg et al. - 2005 - Geometry prediction for high degree polygons.pdf:pdf},
isbn = {1595932036},
keywords = {mesh compression,predictive geometry coding},
pages = {147--152},
title = {{Geometry prediction for high degree polygons}},
url = {http://portal.acm.org/citation.cfm?doid=1090122.1090146},
year = {2005}
}
@article{Szymczak2001,
abstract = {One of the most natural measures of regularity of a triangular mesh homeomorphic to the two-dimensional sphere is the fraction of its vertices having degree 6. We construct a linear-time connectivity compression scheme build upon Edgebreaker which explicitly takes advantage of regularity and prove rigorously that, for sufficiently large and regular meshes, it produces encodings not longer than 0.811 bits per triangle: 50{\%} below the information-theoretic lower bound for the class of all meshes. Our method uses predictive techniques enabled by the Spirale Reversi decoding algorithm.},
author = {Szymczak, Andrzej and King, Davis and Rossignac, Jarek},
doi = {10.1016/S0925-7721(01)00035-9},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Szymczak, King, Rossignac - 2001 - An Edgebreaker-based efficient compression scheme for regular meshes.pdf:pdf},
issn = {09257721},
journal = {Computational Geometry: Theory and Applications},
keywords = {Compression,Information-theoretic lower bound,Triangle mesh},
number = {1-2},
pages = {53--68},
title = {{An Edgebreaker-based efficient compression scheme for regular meshes}},
volume = {20},
year = {2001}
}
@incollection{Alliez2005,
abstract = {3D meshes are widely used in graphic and simulation applications for approximating 3D objects. When representing complex shapes in a raw data for- mat, meshes consume a large amount of space. Applications calling for compact storage and fast transmission of 3D meshes have motivated the multitude of al- gorithms developed to efficiently compress these datasets. In this paper we survey recent developments in compression of 3D surface meshes.We survey the main ideas and intuition behind techniques for single-rate and progressive mesh coding. Where possible, we discuss the theoretical results obtained for asymptotic behavior or op- timality of the approach. We also list some open questions and directions for future research.},
author = {Alliez, Pierre and Gotsman, Craig},
booktitle = {Advances in Multiresolution for Geometric Modelling},
doi = {10.1007/3-540-26808-1_1},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alliez, Gotsman - 2005 - Recent Advances in Compression of 3D Meshes.pdf:pdf},
isbn = {978-3-540-21462-5},
keywords = {survey},
mendeley-tags = {survey},
pages = {3--26},
publisher = {Springer},
title = {{Recent Advances in Compression of 3D Meshes}},
url = {http://link.springer.com/10.1007/3-540-26808-1{\_}1},
year = {2005}
}
@article{Bajaj1999,
abstract = {Triangular meshes are widely used as primary representation of surface models for networked gaming and for complex interactive design in manufacturing. Accurate triangulation of a surface with sharp features (highly varying curvatures, holes) may require an extremely large number of triangles. Fast transmission of such large triangle meshes is critical to many applications that interactively manipulate geometric models in remote networked environments. The need for a succinct representation is therefore not only to reduce static storage requirements, but also to consume less network bandwidth and thus reduce the transmission time. In this paper we address the problem of defining a space efficient encoding scheme for both lossless and error-bounded lossy compression of triangular meshes that is robust enough to handle directly arbitrary sets of triangles including non-orientable meshes, non-manifold meshes and even non-mesh cases. The compression is achieved by capturing the redundant information in both the topology (connectivity) and geometry with possibly property attributes. Example models and results are also reported. {\textcopyright} 1999 Published by Elsevier Science B.V. All rights reserved.},
author = {Bajaj, Chandrajit L. and Pascucci, Valerio and Zhuang, Guozhong},
doi = {10.1016/S0925-7721(99)00026-7},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bajaj et al. - 1999 - Single Resolution Compression of Arbitrary Triangular Meshes.pdf:pdf},
issn = {09257721},
journal = {Computational Geometry: Theory and Applications},
pages = {1--10},
title = {{Single Resolution Compression of Arbitrary Triangular Meshes}},
year = {1999}
}
@article{Apfer2011,
author = {Kapfer, Joshua M. and Paloski, Rori A.},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Apfer, Aloski - 2011 - M esh D ata S tructures tr ct res Data Structures.pdf:pdf},
internal-note = {De-garbled from a broken small-caps import ("Apfer, J Oshua M K and Aloski, R O R I A P"); the erosion-control keywords conflict with the reconstructed title, so this entry likely mixes two documents -- verify against the PDF},
keywords = {entanglement,erosion control,erosion fences,mesh,risk},
month = jan,
pages = {1--9},
title = {{Mesh Data Structures}},
volume = {6},
year = {2011}
}
@article{Prat2005,
  author   = {Prat, Sylvain and Gioia, Patrick and Bertrand, Yves and Meneveaux, Daniel},
  title    = {{Connectivity compression in an arbitrary dimension}},
  journal  = {Visual Computer},
  volume   = {21},
  number   = {8-10},
  pages    = {876--885},
  year     = {2005},
  doi      = {10.1007/s00371-005-0325-z},
  issn     = {01782789},
  keywords = {Compression,Connectivity,Generalized maps,Topological structure},
  file     = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Prat et al. - 2005 - Connectivity compression in an arbitrary dimension.pdf:pdf}
}
@misc{Web3DConsortium2015,
author = {{Web3D Consortium}},
howpublished = {ISO/IEC 19776-3:2015},
title = {{Extensible 3D (X3D) encodings -- Part 3: Compressed binary encoding}},
url = {https://www.iso.org/standard/60504.html},
year = {2015}
}
@article{Cohen-Or2002,
author = {Cohen-Or, D. and Cohen, Rami and Irony, Revital},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cohen-Or, Cohen, Irony - 2002 - Multi-way geometry encoding.pdf:pdf},
internal-note = {This is a technical report; consider converting to @techreport once the issuing institution is confirmed},
journal = {Technical Report},
month = aug,
pages = {1--9},
title = {{Multi-way geometry encoding}},
url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.19.9593{\&}rep=rep1{\&}type=pdf},
year = {2002}
}
@inproceedings{Isenburg2005d,
abstract = { Recent years have seen an immense increase in the complexity of geometric data sets. Today's gigabyte-sized polygon models can no longer be completely loaded into the main memory of common desktop PCs. Unfortunately, current mesh formats, which were designed years ago when meshes were orders of magnitudes smaller, do not account for this. Using such formats to store large meshes is inefficient and complicates all subsequent processing. We describe a streaming format for polygon meshes that is simple enough to replace current offline mesh formats and is more suitable for representing large data sets. Furthermore, it is an ideal input and output format for I/O-efficient out-of-core algorithms that process meshes in a streaming, possibly pipelined, fashion. This paper chiefly concerns the underlying theory and the practical aspects of creating and working with this new representation. In particular, we describe desirable qualities for streaming meshes and methods for converting meshes from a traditional to a streaming format. A central theme of this paper is the issue of coherent and compatible layouts of the mesh vertices and polygons. We present metrics and diagrams that characterize the coherence of a mesh layout and suggest appropriate strategies for improving its "streamability". To this end, we outline several out-of-core algorithms for reordering meshes with poor coherence, and present results for a menagerie of well known and generally incoherent surface meshes.},
author = {Isenburg, Martin and Lindstrom, Peter},
booktitle = {Proceedings of the IEEE Visualization Conference},
doi = {10.1109/VIS.2005.94},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Lindstrom - 2005 - Streaming meshes.pdf:pdf},
isbn = {0780394623},
pages = {30},
title = {{Streaming meshes}},
year = {2005}
}
@unpublished{Isenburg2004a,
author = {Isenburg, Martin and Snoeyink, Jack},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Isenburg, Snoeyink - 2004 - Graph coding and connectivity compression.pdf:pdf},
note = {Manuscript in preparation},
title = {{Graph coding and connectivity compression}},
url = {http://152.2.128.56/{~}isenburg/papers/is-gccc-04.pdf},
year = {2004}
}
@misc{Web3DConsortium2015a,
author = {{Web3D Consortium}},
howpublished = {ISO/IEC 19776-2:2015},
title = {{Extensible 3D (X3D) encodings -- Part 2: Classic VRML encoding}},
url = {https://www.iso.org/standard/60503.html},
year = {2015}
}
@inproceedings{Kazhdan2006a,
abstract = {Poisson surface reconstruction creates watertight surfaces from oriented point sets. In this work we extend the technique to explicitly incorporate the points as interpolation constraints. The extension can be interpreted as a generalization of the underlying mathematical framework to a screened Poisson equation. In contrast to other image and geometry processing techniques, the screening term is defined over a sparse set of points rather than over the full domain. We show that these sparse constraints can nonetheless be integrated efficiently. Because the modified linear system retains the same finite-element discretization, the sparsity structure is unchanged, and the system can still be solved using a multigrid approach. Moreover we present several algorithmic improvements that together reduce the time complexity of the solver to linear in the number of points, thereby enabling faster, higher-quality surface reconstructions.},
author = {Kazhdan, Michael and Bolitho, Matthew and Hoppe, Hugues},
booktitle = {Proceedings of Eurographics Symposium on Geometry Processing},
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kazhdan, Hoppe - 2013 - Screened poisson surface reconstruction(2).pdf:pdf},
internal-note = {Removed doi/eprint/volume/number/issn/pmid fields that were imported from other records (including the 2013 Screened Poisson TOG article); the abstract and attached PDF are also from the 2013 paper -- verify which work is actually being cited},
isbn = {3905673363},
pages = {61--70},
title = {{Poisson Surface Reconstruction}},
year = {2006}
}
@misc{Stereo,
file = {:C$\backslash$:/Users/abbas/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Stereo - Unknown - Surface Representations Surface Normals.pdf:pdf},
internal-note = {Imported author "Stereo, Photometric" was the phrase "Photometric Stereo" mis-parsed as a person name and has been removed; true author unknown. Date taken from the "August 2006" value found in the imported number field -- verify against the document},
month = aug,
pages = {1--14},
title = {{Surface Representations Surface Normals}},
year = {2006}
}