gnunetbib

Bibliography (BibTeX, based on AnonBib)
Log | Files | Refs | README | LICENSE

commit 08ca4041589a255c693e1680fab38cad233e39d4
parent 34ba7f535e474b2c18465d31d329d629ba347860
Author: Nils Gillmann <ng0@n0.is>
Date:   Sun,  7 Oct 2018 13:36:21 +0000

Fix months.

Signed-off-by: Nils Gillmann <ng0@n0.is>

Diffstat:
M gnunetbib.bib | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/gnunetbib.bib b/gnunetbib.bib @@ -1182,7 +1182,7 @@ handle replication and, thus, to trade off replication costs for fair load distr journal = {Communications Surveys Tutorials, IEEE}, volume = {14}, year = {2012}, - month = {First}, + month = jan, pages = {131--155}, abstract = {Many network solutions and overlay networks utilize probabilistic techniques to reduce information processing and networking costs. This survey article presents a number of frequently used and useful probabilistic techniques. Bloom filters and their variants are of prime importance, and they are heavily used in various distributed systems. This has been reflected in recent research and many new algorithms have been proposed for distributed systems that are either directly or indirectly based on Bloom filters. In this survey, we give an overview of the basic and advanced techniques, reviewing over 20 variants and discussing their application in distributed systems, in particular for caching, peer-to-peer systems, routing and forwarding, and measurement data summarization}, keywords = {Arrays, Bismuth, bloom filters, distributed systems, Filtering theory, filters, Fingerprint recognition, forwarding, information processing, measurement data summarization, networking costs, overlay networks, Peer to peer computing, peer-to-peer computing, Peer-to-peer systems, Probabilistic logic, probabilistic structures, probabilistic techniques, probability, routing, telecommunication network routing}, @@ -5539,7 +5539,7 @@ Although several solutions exist in the relevant literature for this problem, th @conference {2007_9, title = {ParaNets: A Parallel Network Architecture for Challenged Networks}, year = {2007}, - month = {March 2007 }, + month = mar, abstract = {Networks characterized by challenges, such as intermittent connectivity, network heterogeneity, and large delays, are called "challenged networks". 
We propose a novel network architecture for challenged networks dubbed Parallel Networks, or, ParaNets. The vision behind ParaNets is to have challenged network protocols operate over multiple heterogenous networks, simultaneously available, through one or more devices. We present the ParaNets architecture and discuss its short-term challenges and longterm implications. We also argue, based on current research trends and the ParaNets architecture, for the evolution of the conventional protocol stack to a more flexible cross-layered protocol tree. To demonstrate the potential impact of ParaNets, we use Delay Tolerant Mobile Networks (DTMNs) as a representative challenged network over which we evaluate ParaNets. Our ultimate goal in this paper is to open the way for further work in challenged networks using ParaNets as the underlying architecture}, isbn = {978-0-7695-3001-7}, www_section = {http://ieeexplore.ieee.org/Xplore/login.jsp?reload=true\&url=http\%3A\%2F\%2Fieeexplore.ieee.org\%2Fiel5\%2F4389542\%2F4389543\%2F04389561.pdf\%3Farnumber\%3D4389561\&authDecision=-203}, @@ -7239,7 +7239,7 @@ two shallow circuits: one for generating many arbitrarily but identically biased title = {Privacy Preserving Nearest Neighbor Search}, booktitle = {Data Mining Workshops, 2006. ICDM Workshops 2006. Sixth IEEE International Conference on}, year = {2006}, - month = {Dec}, + month = dec, abstract = {Data mining is frequently obstructed by privacy concerns. In many cases data is distributed, and bringing the data together in one place for analysis is not possible due to privacy laws (e.g. HIPAA) or policies. Privacy preserving data mining techniques have been developed to address this issue by providing mechanisms to mine the data while giving certain privacy guarantees. In this work we address the issue of privacy preserving nearest neighbor search, which forms the kernel of many data mining applications. 
To this end, we present a novel algorithm based on secure multiparty computation primitives to compute the nearest neighbors of records in horizontally distributed data. We show how this algorithm can be used in three important data mining algorithms, namely LOF outlier detection, SNN clustering, and kNN classification}, keywords = {Clustering algorithms, Computer science, Conferences, cryptography, Data mining, data privacy, distributed computing, Kernel, kNN classification, LOF outlier detection, Medical diagnostic imaging, multiparty computation primitives, nearest neighbor search, Nearest neighbor searches, pattern clustering, privacy preservation, SNN clustering}, doi = {10.1109/ICDMW.2006.133}, @@ -10324,7 +10324,7 @@ Parallel re-encryption mixnets offer security guarantees comparable to those of journal = {Knowledge and Data Engineering, IEEE Transactions on}, volume = {16}, year = {2004}, - month = {Jan}, + month = jan, pages = {28-40}, abstract = {Current Web search engines are built to serve all users, independent of the special needs of any individual user. Personalization of Web search is to carry out retrieval for each user incorporating his/her interests. We propose a novel technique to learn user profiles from users{\textquoteright} search histories. The user profiles are then used to improve retrieval effectiveness in Web search. A user profile and a general profile are learned from the user{\textquoteright}s search history and a category hierarchy, respectively. These two profiles are combined to map a user query into a set of categories which represent the user{\textquoteright}s search intention and serve as a context to disambiguate the words in the user{\textquoteright}s query. Web search is conducted based on both the user query and the set of categories. Several profile learning and category mapping algorithms and a fusion algorithm are provided and evaluated. 
Experimental results indicate that our technique to personalize Web search is both effective and efficient}, keywords = {BANDWIDTH, category hierarchy, category mapping algorithms, Displays, fusion algorithm, History, human factors, information filtering, information retrieval, libraries, personalized Web search, profile learning, retrieval effectiveness, search engines, search intention, special needs, user interfaces, user profiles, user search histories, Web search, Web search engines}, @@ -13011,7 +13011,7 @@ The read-only file system makes the security of published content independent fr @conference {2002_1, title = {Finite-length analysis of low-density parity-check codes on the binary erasure channel}, year = {2002}, - month = {01/2002 }, + month = jan, abstract = {In this paper, we are concerned with the finite-length analysis of low-density parity-check (LDPC) codes when used over the binary erasure channel (BEC). The main result is an expression for the exact average bit and block erasure probability for a given regular ensemble of LDPC codes when decoded iteratively. We also give expressions for upper bounds on the average bit and block erasure probability for regular LDPC ensembles and the standard random ensemble under maximum-likelihood (ML) decoding. Finally, we present what we consider to be the most important open problems in this area}, keywords = {BEC, coding theory, low-density parity-check, maximum-likelihood}, doi = {10.1109/TIT.2002.1003839 }, @@ -16339,7 +16339,7 @@ The technique can also be used to form rosters of untraceable digital pseudonyms title = {Non-Discretionary Access Control for Decentralized Computing Systems}, number = {MIT/LCS/TR-179}, year = {1977}, - month = {may}, + month = may, school = {Laboratory for Computer Science, Massachusetts Institute of Technology}, type = {S. M. \& E. E. thesis}, address = {Cambridge, MA},