@article{costaHIHISIVDatabaseGene2024,
abbr = {BMC Bioinf.},
selected = {true},
title = {{HIHISIV}: a database of gene expression in {HIV} and {SIV} host immune response},
volume = {25},
copyright = {All rights reserved},
issn = {1471-2105},
shorttitle = {{HIHISIV}},
url = {https://bmcbioinformatics.biomedcentral.com/articles/10.1186/s12859-024-05740-7},
doi = {10.1186/s12859-024-05740-7},
  abstract = {In the battle of the host against lentiviral pathogenesis, the immune response is crucial. However, several questions remain unanswered about the interaction with different viruses and their influence on disease progression. The simian immunodeficiency virus (SIV) infecting nonhuman primates (NHP) is widely used as a model for the study of the human immunodeficiency virus (HIV) both because they are evolutionarily linked and because they share physiological and anatomical similarities that are largely explored to understand the disease progression. The HIHISIV database was developed to support researchers to integrate and evaluate the large number of transcriptional data associated with the presence/absence of the pathogen (SIV or HIV) and the host response (NHP and human). The datasets are composed of microarray and RNA-Seq gene expression data that were selected, curated, analyzed, enriched, and stored in a relational database. Six query templates comprise the main data analysis functions and the resulting information can be downloaded. The HIHISIV database, available at https://hihisiv.github.io, provides accurate resources for browsing and visualizing results and for more robust analyses of pre-existing data in transcriptome repositories.},
language = {en},
number = {1},
urldate = {2024-03-23},
journal = {BMC Bioinformatics},
author = {Costa, Raquel L. and Gadelha, Luiz and D’arc, Mirela and Ribeiro-Alves, Marcelo and Robertson, David L. and Schwartz, Jean-Marc and Soares, Marcelo A. and Porto, Fábio},
month = mar,
year = {2024},
pages = {125},
html = {https://bmcbioinformatics.biomedcentral.com/articles/10.1186/s12859-024-05740-7},
pdf = {https://bmcbioinformatics.biomedcentral.com/counter/pdf/10.1186/s12859-024-05740-7.pdf},
}
@article{zulfiqarImplementationFAIRPractices2024a,
abbr = {Metabolites},
selected = {true},
title = {Implementation of {FAIR} {Practices} in {Computational} {Metabolomics} {Workflows}—{A} {Case} {Study}},
volume = {14},
copyright = {https://creativecommons.org/licenses/by/4.0/},
issn = {2218-1989},
url = {https://www.mdpi.com/2218-1989/14/2/118},
doi = {10.3390/metabo14020118},
abstract = {Scientific workflows facilitate the automation of data analysis tasks by integrating various software and tools executed in a particular order. To enable transparency and reusability in workflows, it is essential to implement the FAIR principles. Here, we describe our experiences implementing the FAIR principles for metabolomics workflows using the Metabolome Annotation Workflow (MAW) as a case study. MAW is specified using the Common Workflow Language (CWL), allowing for the subsequent execution of the workflow on different workflow engines. MAW is registered using a CWL description on WorkflowHub. During the submission process on WorkflowHub, a CWL description is used for packaging MAW using the Workflow RO-Crate profile, which includes metadata in Bioschemas. Researchers can use this narrative discussion as a guideline to commence using FAIR practices for their bioinformatics or cheminformatics workflows while incorporating necessary amendments specific to their research area.},
language = {en},
number = {2},
urldate = {2024-04-16},
journal = {Metabolites},
author = {Zulfiqar, Mahnoor and Crusoe, Michael R. and König-Ries, Birgitta and Steinbeck, Christoph and Peters, Kristian and Gadelha, Luiz},
month = feb,
year = {2024},
pages = {118},
html = {https://www.mdpi.com/2218-1989/14/2/118},
pdf = {https://www.mdpi.com/2218-1989/14/2/118/pdf?version=1708164641},
}
@inproceedings{krugerGHGAArchiveSelected2024a,
abbr = {IWSG},
address = {Toulouse, France},
title = {The {GHGA} {Archive}: {Selected} {Updates}},
copyright = {Creative Commons Attribution 4.0 International},
shorttitle = {The {GHGA} {Archive}},
url = {https://zenodo.org/doi/10.5281/zenodo.13863209},
doi = {10.5281/ZENODO.13863209},
abstract = {The German Human Genome-Phenome Archive (GHGA) is a cross-institutional project and German National Research Data Infrastructure (NFDI) consortium for the development of a scientific gateway for secure omics data sharing based on FAIR principles to act as the German node of the federated European Genome Archive (fEGA), participating also in the European Genomics Data Infrastructure (GDI) project. Started in 2020, the GHGA is reaching its first major release milestone: having released an online science gateway for metadata browsing termed the Metadata Catalog, the next step involves the development of a portal that extends the functionality already available to also include authentication, data download (via an external tool), and data access request management: the GHGA Archive. We present here a brief overview of the new technologies and functionalities that will be available for the Archive version of the GHGA science gateway.},
urldate = {2024-10-01},
booktitle = {16th {International} {Workshop} on {Science} {Gateways} ({IWSG2024})},
author = {Krüger, Jens and Orellana Figueroa, Jordy Didier and Sezer, Zehra Hazal and Zajac, Thomas Jakob and Breuer, Kersten and Rocha Gadelha Junior, Luiz Manoel and Zwerschke, Christoph and Sürün, Bilge and Nahnsen, Sven},
month = sep,
year = {2024},
note = {Publisher: Zenodo},
keywords = {FAIR, human genome data, NFDI, omics, science gateway, sensitive data, FEGA},
html = {https://zenodo.org/doi/10.5281/zenodo.13863209},
pdf = {https://zenodo.org/records/13863209/files/IWSG2024_paper_5.pdf?download=1},
}
@article{zulfiqarMAWReproducibleMetabolome2023a,
abbr = {J. Cheminf.},
title = {{MAW}: the reproducible {Metabolome} {Annotation} {Workflow} for untargeted tandem mass spectrometry},
volume = {15},
copyright = {All rights reserved},
issn = {1758-2946},
shorttitle = {{MAW}},
url = {https://jcheminf.biomedcentral.com/articles/10.1186/s13321-023-00695-y},
doi = {10.1186/s13321-023-00695-y},
  abstract = {Mapping the chemical space of compounds to chemical structures remains a challenge in metabolomics. Despite the advancements in untargeted liquid chromatography-mass spectrometry (LC–MS) to achieve a high-throughput profile of metabolites from complex biological resources, only a small fraction of these metabolites can be annotated with confidence. Many novel computational methods and tools have been developed to enable chemical structure annotation to known and unknown compounds such as in silico generated spectra and molecular networking. Here, we present an automated and reproducible Metabolome Annotation Workflow (MAW) for untargeted metabolomics data to further facilitate and automate the complex annotation by combining tandem mass spectrometry (MS2) input data pre-processing, spectral and compound database matching with computational classification, and in silico annotation. MAW takes the LC-MS2 spectra as input and generates a list of putative candidates from spectral and compound databases. The databases are integrated via the R package Spectra and the metabolite annotation tool SIRIUS as part of the R segment of the workflow (MAW-R). The final candidate selection is performed using the cheminformatics tool RDKit in the Python segment (MAW-Py). Furthermore, each feature is assigned a chemical structure and can be imported to a chemical structure similarity network. MAW is following the FAIR (Findable, Accessible, Interoperable, Reusable) principles and has been made available as the docker images, maw-r and maw-py. The source code and documentation are available on GitHub (https://github.com/zmahnoor14/MAW). The performance of MAW is evaluated on two case studies. MAW can improve candidate ranking by integrating spectral databases with annotation tools like SIRIUS which contributes to an efficient candidate selection procedure. The results from MAW are also reproducible and traceable, compliant with the FAIR guidelines. Taken together, MAW could greatly facilitate automated metabolite characterization in diverse fields such as clinical metabolomics and natural product discovery.},
language = {en},
number = {1},
urldate = {2023-07-27},
journal = {Journal of Cheminformatics},
author = {Zulfiqar, Mahnoor and Gadelha, Luiz and Steinbeck, Christoph and Sorokina, Maria and Peters, Kristian},
month = mar,
year = {2023},
pages = {32},
html = {https://jcheminf.biomedcentral.com/articles/10.1186/s13321-023-00695-y},
pdf = {https://jcheminf.biomedcentral.com/counter/pdf/10.1186/s13321-023-00695-y.pdf},
}
@article{gadelhaGermanHumanGenomePhenome2023,
abbr = {CoRDI},
title = {German {Human} {Genome}-{Phenome} {Archive} in an {International} {Context}: {Toward} a {Federated} {Infrastructure} for {Managing} and {Analyzing} {Genomics} and {Health} {Data}},
volume = {1},
copyright = {All rights reserved},
issn = {2941-296X},
shorttitle = {German {Human} {Genome}-{Phenome} {Archive} in an {International} {Context}},
url = {https://www.tib-op.org/ojs/index.php/CoRDI/article/view/394},
doi = {10.52825/cordi.v1i.394},
abstract = {With increasing numbers of human omics data, there is an urgent need for adequate resources for data sharing while also standardizing and harmonizing data processing. As part of the National Research Data Infrastructure (NFDI), the German Human Genome-Phenome Archive (GHGA) strives to connect the data from German researchers and their institutions to the international landscape of genome research. To achieve this, GHGA partners up with international activities such as the federated European Genome-Phenome Archive (EGA) [1] and the recently funded European Genomic Data Infrastructure (GDI) project to enable participation in international studies while ensuring at the same time the proper protection of the sensitive patient data included in GHGA.},
urldate = {2023-09-10},
journal = {Proceedings of the Conference on Research Data Infrastructure},
author = {Gadelha, Luiz and Eufinger, Jan},
month = sep,
year = {2023},
html = {https://www.tib-op.org/ojs/index.php/CoRDI/article/view/394},
pdf = {https://www.tib-op.org/ojs/index.php/CoRDI/article/view/394/446},
}
@inproceedings{gadelhaFrameworkIntegrativeFAIR2022,
abbr = {eScience},
selected = {true},
address = {Salt Lake City, UT, USA},
title = {Toward a {Framework} for {Integrative}, {FAIR}, and {Reproducible} {Management} of {Data} on the {Dynamic} {Balance} of {Microbial} {Communities}},
copyright = {All rights reserved},
isbn = {978-1-66546-124-5},
url = {https://ieeexplore.ieee.org/document/9973522/},
doi = {10.1109/eScience55777.2022.00080},
urldate = {2022-12-21},
booktitle = {2022 {IEEE} 18th {International} {Conference} on e-{Science} (e-{Science})},
publisher = {IEEE},
  author = {Gadelha, Luiz and Hohmuth, Martin and Zulfiqar, Mahnoor and Schöne, David and Samuel, Sheeba and Sorokina, Maria and Steinbeck, Christoph and König-Ries, Birgitta},
month = oct,
year = {2022},
pages = {443--449},
html = {https://ieeexplore.ieee.org/document/9973522/},
pdf = {https://arxiv.org/pdf/2207.06890},
}
@incollection{ocanaParslRNASeqEfficientScalable2022,
abbr = {CARLA},
address = {Cham},
title = {{ParslRNA}-{Seq}: {An} {Efficient} and {Scalable} {RNAseq} {Analysis} {Workflow} for {Studies} of {Differentiated} {Gene} {Expression}},
volume = {1660},
copyright = {All rights reserved},
isbn = {978-3-031-23820-8 978-3-031-23821-5},
shorttitle = {{ParslRNA}-{Seq}},
url = {https://link.springer.com/10.1007/978-3-031-23821-5_13},
language = {en},
urldate = {2022-12-21},
booktitle = {High {Performance} {Computing}},
publisher = {Springer International Publishing},
author = {Ocaña, Kary and Cruz, Lucas and Coelho, Micaella and Terra, Rafael and Galheigo, Marcelo and Carneiro, Andre and Carvalho, Diego and Gadelha, Luiz and Boito, Francieli and Navaux, Philippe and Osthoff, Carla},
editor = {Navaux, Philippe and Barrios H., Carlos J. and Osthoff, Carla and Guerrero, Ginés},
year = {2022},
doi = {10.1007/978-3-031-23821-5_13},
note = {Series Title: Communications in Computer and Information Science},
pages = {174--189},
html = {https://link.springer.com/10.1007/978-3-031-23821-5_13},
}
@article{cruzParallelPerformanceProfiling2022,
abbr = {Comp. y Sis.},
title = {Parallel {Performance} and {I}/{O} {Profiling} of {HPC} {RNA}-{Seq} {Applications}},
volume = {26},
copyright = {All rights reserved},
issn = {2007-9737, 1405-5546},
url = {https://cys.cic.ipn.mx/ojs/index.php/CyS/article/view/4437},
doi = {10.13053/cys-26-4-4437},
number = {4},
urldate = {2023-01-05},
journal = {Computación y Sistemas},
author = {Cruz, Lucas and Coelho, Micaella and Galheigo, Marcelo and Carneiro, Andre and Carvalho, Diego and Gadelha, Luiz and Boito, Francieli and Navaux, Philippe and Osthoff, Carla and Ocaña, Kary},
month = dec,
year = {2022},
html = {https://cys.cic.ipn.mx/ojs/index.php/CyS/article/view/4437},
pdf = {https://cys.cic.ipn.mx/ojs/index.php/CyS/article/view/4437/3471},
}
@article{Peterson2022,
abbr = {Biodiv. Inf.},
author = {Peterson, A. Townsend and Aiello-Lammens, Matthew and Amatulli, Giuseppe and Anderson, Robert and Cobos, Marlon and Diniz-Filho, Jos{\'{e}} Alexandre and Escobar, Luis and Feng, Xiao and Franklin, Janet and Gadelha, Luiz and Georges, Damien and Gu{\'{e}}guen, M and Gueta, Tomer and Ingenloff, Kate and Jarvie, Scott and Jim{\'{e}}nez, Laur and Karger, Dirk and Kass, Jamie and Kearney, Michael and Loyola, Rafael and Machado-Stredel, Fernando and Mart{\'{i}}nez-Meyer, Enrique and Merow, Cory and Mondelli, Maria Luiza and Mortara, Sara and Muscarella, Robert and Myers, Corinne and Naimi, Babak and Noesgaard, Daniel and Ondo, Ian and Osorio-Olvera, Luis and Owens, Hannah and Pearson, Richard and Pinilla-Buitrago, Gonzalo and S{\'{a}}nchez-Tapia, Andrea and Saupe, Erin and Thuiller, Wilfried and Varela, Sara and Warren, Dan and Wieczorek, John and Yates, Katherine and Zhu, Gengping and Zuquim, Gabriela and Zurell, Damaris},
journal = {Biodiversity Informatics},
  title = {{ENM2020: A Free Online Course and Set of Resources on Modeling Species' Niches and Distributions}},
html = {https://journals.ku.edu/jbi/article/view/15016},
pdf = {https://journals.ku.edu/jbi/article/view/15016/15152},
volume = {17},
year = {2022}
}
@inproceedings{Mondelli2021,
abbr = {eScience},
  author = {Mondelli, Maria Luiza and Samuel, Sheeba and König-Ries, Birgitta and Gadelha, Luiz},
booktitle = {2021 IEEE 17th International Conference on eScience (eScience)},
doi = {10.1109/eScience51609.2021.00057},
isbn = {978-1-6654-0361-0},
month = {sep},
pages = {283--288},
publisher = {IEEE},
title = {{Capturing and Semantically Describing Provenance to Tell the Story of R Scripts}},
html = {https://ieeexplore.ieee.org/document/9582412/},
year = {2021}
}
@article{Gadelha2020a,
abbr = {WIREs DMKD},
selected = {true},
abstract = {The unprecedented size of the human population, along with its associated economic activities, has an ever-increasing impact on global environments. Across the world, countries are concerned about the growing resource consumption and the capacity of ecosystems to provide resources. To effectively conserve biodiversity, it is essential to make indicators and knowledge openly available to decision-makers in ways that they can effectively use them. The development and deployment of tools and techniques to generate these indicators require having access to trustworthy data from biological collections, field surveys and automated sensors, molecular data, and historic academic literature. The transformation of these raw data into synthesized information that is fit for use requires going through many refinement steps. The methodologies and techniques applied to manage and analyze these data constitute an area usually called biodiversity informatics. Biodiversity data follow a life cycle consisting of planning, collection, certification, description, preservation, discovery, integration, and analysis. Researchers, whether producers or consumers of biodiversity data, will likely perform activities related to at least one of these steps. This article explores each stage of the life cycle of biodiversity data, discussing its methodologies, tools, and challenges. This article is categorized under: Algorithmic Development > Biological Data Mining.},
author = {Gadelha, Luiz and de Siracusa, Pedro C. and Dalcin, Eduardo Couto and da Silva, Lu{\'{i}}s Alexandre Estev{\~{a}}o and Augusto, Douglas A. and Krempser, Eduardo and Affe, Helen Michelle and Costa, Raquel Lopes and Mondelli, Maria Luiza and Meirelles, Pedro Milet and Thompson, Fabiano and Chame, Marcia and Ziviani, Artur and de Siqueira, Marinez Ferreira},
doi = {10.1002/widm.1394},
issn = {19424795},
journal = {Wiley Interdisciplinary Reviews: Data Mining and Knowledge Discovery},
keywords = {biodiversity informatics,computational modeling,scientific data management,scientific workflows},
month = {nov},
number = {1},
pages = {e1394},
title = {{A survey of biodiversity informatics: Concepts, practices, and challenges}},
html = {https://onlinelibrary.wiley.com/doi/10.1002/widm.1394},
pdf = {https://wires.onlinelibrary.wiley.com/doi/epdf/10.1002/widm.1394},
volume = {11},
year = {2021}
}
@incollection{Gadelha2020,
author = {Gadelha, Luiz},
booktitle = {Biotecnologia Marinha},
chapter = {15},
editor = {Thompson, Fabiano and Thompson, Cristiane},
isbn = {1010001000},
pages = {359--381},
publisher = {Ed. FURG},
title = {{Gest{\~{a}}o de Dados de Biotecnologia Marinha}},
pdf = {https://cienciasdomarbrasil.furg.br/images/livros/LivroBiotecnologia.pdf},
year = {2020}
}
@article{Siracusa2020,
abbr = {Sci. Rep.},
selected = {true},
author = {de Siracusa, Pedro C. and Gadelha, Luiz and Ziviani, Artur},
doi = {10.1038/s41598-020-60134-y},
issn = {2045-2322},
journal = {Scientific Reports},
month = {dec},
number = {1},
pages = {3358},
title = {{New perspectives on analysing data from biological collections based on social network analytics}},
html = {http://www.nature.com/articles/s41598-020-60134-y},
volume = {10},
pdf = {https://www.nature.com/articles/s41598-020-60134-y.pdf},
year = {2020}
}
@article{Ocana2020,
abbr = {FGCS},
author = {Oca{\~{n}}a, Kary A.C.S. and Galheigo, Marcelo and Osthoff, Carla and Gadelha, Luiz and Porto, Fabio and Gomes, Ant{\^{o}}nio Tadeu A. and de Oliveira, Daniel and Vasconcelos, Ana Tereza},
doi = {10.1016/j.future.2020.01.030},
issn = {0167739X},
journal = {Future Generation Computer Systems},
month = {jun},
pages = {192--214},
title = {{BioinfoPortal: A scientific gateway for integrating bioinformatics applications on the Brazilian national high-performance computing network}},
html = {https://linkinghub.elsevier.com/retrieve/pii/S0167739X19318230},
volume = {107},
year = {2020}
}
@incollection{Mondelli2019,
abbr = {ER Workshops},
author = {Mondelli, Maria Luiza and {Townsend Peterson}, A. and Gadelha, Luiz},
booktitle = {Advances in Conceptual Modeling. ER 2019. Lecture Notes in Computer Science, vol. 11787},
doi = {10.1007/978-3-030-34146-6_3},
pages = {23--33},
publisher = {Springer},
title = {{Exploring Reproducibility and FAIR Principles in Data Science Using Ecological Niche Modeling as a Case Study}},
html = {http://link.springer.com/10.1007/978-3-030-34146-6_3},
year = {2019}
}
@article{Mondelli2019a,
abbr = {iSys},
author = {Mondelli, Maria Luiza and Gadelha, Luiz and Ziviani, Artur},
journal = {iSys - Revista Brasileira de Sistemas de Informa{\c{c}}{\~{a}}o},
number = {3},
pages = {53--72},
title = {{O Que os Pa{\'{i}}ses Escutam: Analisando a Rede de G{\^{e}}neros Musicais ao Redor do Mundo}},
volume = {12},
html = {http://seer.unirio.br/isys/article/view/8099},
pdf = {http://seer.unirio.br/index.php/isys/article/view/8099/7927},
year = {2019}
}
@inproceedings{Ocana2019,
abbr = {CCGrid},
author = {Ocana, Kary and Galheigo, Marcelo and Osthoff, Carla and Gadelha, Luiz and Gomes, Antonio Tadeu A. and {De Oliveira}, Daniel and Porto, Fabio and Vasconcelos, Ana Tereza},
booktitle = {2019 19th IEEE/ACM International Symposium on Cluster, Cloud and Grid Computing (CCGRID)},
doi = {10.1109/CCGRID.2019.00082},
isbn = {978-1-7281-0912-1},
month = {may},
pages = {638--647},
publisher = {IEEE},
title = {{Towards a Science Gateway for Bioinformatics: Experiences in the Brazilian System of High Performance Computing}},
html = {https://ieeexplore.ieee.org/document/8752906/},
year = {2019}
}
@article{Chame2019,
abbr = {JHIR},
selected = {true},
abstract = {The well-being of wildlife health involves many challenges, such as monitoring the movement of pathogens; expanding health surveillance beyond humans; collecting data and extracting information to identify and predict risks; integrating specialists from different areas to handle data, species and distinct social and environmental contexts; and, the commitment to bringing relevant information to society. In Brazil, there is still the difficulty of building a mechanism that is not impaired by its large territorial extension and its poorly integrated sectoral policies. The Brazilian Wildlife Health Information System, SISS-Geo, is a platform for collaborative monitoring that intends to overcome the challenges in wildlife health. It aims integration and participation of various segments of society, encompassing: the registration of occurrences by citizen scientists; the reliable diagnosis of pathogens from the laboratory and expert networks; and computational and mathematical challenges in analytical and predictive systems, knowledge extraction, data integration and visualization, and geographic information systems. It has been successfully applied to support decision-making on recent wildlife health events, such as a Yellow Fever epizooty.},
author = {Chame, Marcia and Barbosa, Helio J. C. and Gadelha, Luiz and Augusto, Douglas A. and Krempser, Eduardo and Abdalla, Livia},
doi = {10.1007/s41666-019-00055-2},
issn = {2509-4971},
journal = {Journal of Healthcare Informatics Research},
month = {dec},
number = {4},
pages = {414--440},
title = {{SISS-Geo: Leveraging Citizen Science to Monitor Wildlife Health Risks in Brazil}},
html = {http://link.springer.com/10.1007/s41666-019-00055-2},
volume = {3},
year = {2019}
}
@inproceedings{Mondelli2018,
abbr = {BrasNAM},
author = {Mondelli, Maria Luiza and Gadelha, Luiz and Ziviani, Artur},
booktitle = {Anais do VII Brazilian Workshop on Social Network Analysis and Mining (BrasNAM 2018)},
pages = {148--159},
publisher = {SBC},
title = {{O Que os Pa{\'{i}}ses Escutam: Analisando a Rede de G{\^{e}}neros Musicais ao Redor do Mundo}},
html = {https://sol.sbc.org.br/index.php/brasnam/article/view/3586},
pdf = {https://sol.sbc.org.br/index.php/brasnam/article/view/3586/3545},
year = {2018}
}
@article{Mondelli2018a,
abbr = {PeerJ},
selected = {true},
abstract = {Advances in sequencing techniques have led to exponential growth in biological data, demanding the development of large-scale bioinformatics experiments. Because these experiments are computation- and data-intensive, they require high-performance computing techniques and can benefit from specialized technologies such as Scientific Workflow Management Systems and databases. In this work, we present BioWorkbench, a framework for managing and analyzing bioinformatics experiments. This framework automatically collects provenance data, including both performance data from workflow execution and data from the scientific domain of the workflow application. Provenance data can be analyzed through a web application that abstracts a set of queries to the provenance database, simplifying access to provenance information. We evaluate BioWorkbench using three case studies: SwiftPhylo, a phylogenetic tree assembly workflow; SwiftGECKO, a comparative genomics workflow; and RASflow, a RASopathy analysis workflow. We analyze each workflow from both computational and scientific domain perspectives, by using queries to a provenance and annotation database. Some of these queries are available as a pre-built feature of the BioWorkbench web application. Through the provenance data, we show that the framework is scalable and achieves high-performance, reducing up to 98% of the case studies execution time. We also show how the application of machine learning techniques can enrich the analysis process.},
author = {Mondelli, Maria Luiza and Magalh{\~{a}}es, Thiago and Loss, Guilherme and Wilde, Michael and Foster, Ian and Mattoso, Marta and Katz, Daniel and Barbosa, Helio and de Vasconcelos, Ana Tereza R. and Oca{\~{n}}a, Kary and Gadelha, Luiz},
doi = {10.7717/peerj.5551},
issn = {2167-8359},
journal = {PeerJ},
month = {aug},
pages = {e5551},
title = {{BioWorkbench: a high-performance framework for managing and analyzing bioinformatics experiments}},
html = {https://peerj.com/articles/5551},
volume = {6},
pdf = {https://peerj.com/articles/5551.pdf},
year = {2018}
}
@article{Thompson2018,
abbr = {FMS},
author = {Thompson, Fabiano and Kr{\"{u}}ger, Ricardo and Thompson, Cristiane C. and Berlinck, Roberto G. S. and Coutinho, Ricardo and Landell, Melissa F. and Pav{\~{a}}o, Mauro and Mour{\~{a}}o, Paulo A. S. and Salles, Ana and Negri, Naiane and Lopes, Fabyano A. C. and Freire, Vitor and Macedo, Alexandre J. and Maraschin, Marcelo and P{\'{e}}rez, Carlos D. and Pereira, Renato C. and Radis-Baptista, Gandhi and Rezende, Rachel P. and Valenti, Wagner C. and Abreu, Paulo C. and {Biotecmar Network}},
doi = {10.3389/fmars.2018.00236},
issn = {2296-7745},
journal = {Frontiers in Marine Science},
month = {jul},
title = {{Marine Biotechnology in Brazil: Recent Developments and Its Potential for Innovation}},
html = {https://www.frontiersin.org/article/10.3389/fmars.2018.00236/full},
pdf = {https://www.frontiersin.org/articles/10.3389/fmars.2018.00236/pdf},
volume = {5},
year = {2018}
}
@incollection{Sanchez-Tapia2018,
abbr = {CARLA},
author = {S{\'{a}}nchez-Tapia, Andrea and de Siqueira, Marinez Ferreira and Lima, Rafael Oliveira and Barros, Felipe Sodr{\'{e}} M. and Gall, Guilherme M. and Gadelha, Luiz and da Silva, Lu{\'{i}}s Alexandre E. and Osthoff, Carla},
booktitle = {High Performance Computing: 4th Latin American Conference, CARLA 2017. Communications in Computer and Information Science, vol. 796},
doi = {10.1007/978-3-319-73353-1_15},
pages = {218--232},
publisher = {Springer},
title = {{Model-R: A Framework for Scalable and Reproducible Ecological Niche Modeling}},
html = {http://link.springer.com/10.1007/978-3-319-73353-1_15},
year = {2018}
}
@article{Costa2017,
abbr = {PeerJ},
selected = {true},
abstract = {There are many steps in analyzing transcriptome data, from the acquisition of raw data to the selection of a subset of representative genes that explain a scientific hypothesis. The data produced can be represented as networks of interactions among genes and these may additionally be integrated with other biological databases, such as Protein-Protein Interactions, transcription factors and gene annotation. However, the results of these analyses remain fragmented, imposing difficulties, either for posterior inspection of results, or for meta-analysis by the incorporation of new related data. Integrating databases and tools into scientific workflows, orchestrating their execution, and managing the resulting data and its respective metadata are challenging tasks. Additionally, a great amount of effort is equally required to run in-silico experiments to structure and compose the information as needed for analysis. Different programs may need to be applied and different files are produced during the experiment cycle. In this context, the availability of a platform supporting experiment execution is paramount. We present GeNNet, an integrated transcriptome analysis platform that unifies scientific workflows with graph databases for selecting relevant genes according to the evaluated biological systems. It includes GeNNet-Wf, a scientific workflow that pre-loads biological data, pre-processes raw microarray data and conducts a series of analyses including normalization, differential expression inference, clusterization and gene set enrichment analysis. A user-friendly web interface, GeNNet-Web, allows for setting parameters, executing, and visualizing the results of GeNNet-Wf executions. To demonstrate the features of GeNNet, we performed case studies with data retrieved from GEO, particularly using a single-factor experiment in different analysis scenarios. As a result, we obtained differentially expressed genes for which biological functions were analyzed. The results are integrated into GeNNet-DB, a database about genes, clusters, experiments and their properties and relationships. The resulting graph database is explored with queries that demonstrate the expressiveness of this data model for reasoning about gene interaction networks. GeNNet is the first platform to integrate the analytical process of transcriptome data with graph databases. It provides a comprehensive set of tools that would otherwise be challenging for non-expert users to install and use. Developers can add new functionality to components of GeNNet. The derived data allows for testing previous hypotheses about an experiment and exploring new ones through the interactive graph database environment. It enables the analysis of different data on humans, rhesus, mice and rat coming from Affymetrix platforms. GeNNet is available as an open source platform at https://github.com/raquele/GeNNet and can be retrieved as a software container with the command docker pull quelopes/gennet.},
author = {Costa, Raquel L. and Gadelha, Luiz and Ribeiro-Alves, Marcelo and Porto, F{\'{a}}bio},
doi = {10.7717/peerj.3509},
issn = {2167-8359},
journal = {PeerJ},
month = {jul},
pages = {e3509},
title = {{GeNNet: an integrated platform for unifying scientific workflows and graph databases for transcriptome data analysis}},
html = {https://peerj.com/articles/3509},
pdf = {https://peerj.com/articles/3509.pdf},
volume = {5},
year = {2017}
}
@inproceedings{Mondelli2016a,
abbr = {SBBD Demo},
author = {Mondelli, Maria Luiza and de Souza, Matheus Tonelli and Oca{\~{n}}a, Kary and de Vasconcelos, Ana Tereza R. and Gadelha, Luiz},
booktitle = {Proc. of Satellite Events of the 31st Brazilian Symposium on Databases (SBBD 2016)},
pages = {117--122},
publisher = {SBC},
title = {{HPSW-Prof: A Provenance-Based Framework for Profiling High Performance Scientific Workflows}},
year = {2016}
}
@inproceedings{Mondelli2016,
abbr = {BreSci},
author = {Mondelli, Maria Luiza and Galheigo, Marcelo and Medeiros, Vivian and Bastos, Bruno Fernandes and Gomes, Antonio Tadeu Azevedo and Vasconcelos, Ana Tereza R and Gadelha, Luiz},
booktitle = {X Brazilian e-Science Workshop. Anais do XXXVI Congresso da Sociedade Brasileira de Computa{\c{c}}{\~{a}}o},
pages = {277--284},
publisher = {SBC},
title = {{Integrating Scientific Workflows with Scientific Gateways: A Bioinformatics Experiment in the Brazilian National High-Performance Computing Network}},
year = {2016}
}
@article{Meirelles2015,
abbr = {Database},
abstract = {A new open access database, Brazilian Marine Biodiversity (BaMBa) (https://marinebiodiversity.lncc.br), was developed in order to maintain large datasets from the Brazilian marine environment. Essentially, any environmental information can be added to BaMBa. Certified datasets obtained from integrated holistic studies, comprising physical–chemical parameters, -omics, microbiology, benthic and fish surveys can be deposited in the new database, enabling scientific, industrial and governmental policies and actions to be undertaken on marine resources. There is a significant number of databases, however BaMBa is the only integrated database resource both supported by a government initiative and exclusive for marine data. BaMBa is linked to the Information System on Brazilian Biodiversity (SiBBr, http://www.sibbr.gov.br/) and will offer opportunities for improved governance of marine resources and scientists' integration. Database URL: http://marinebiodiversity.lncc.br},
author = {Meirelles, Pedro Milet and Gadelha, Luiz and Francini-Filho, Ronaldo Bastos and Le{\~{a}}o, Rodrigo de Moura and Amado-Filho, Gilberto Menezes and Bastos, Alex Cardoso and Paranhos, Rodolfo Pinheiro da Rocha and Rezende, Carlos Eduardo and Swings, Jean and Siegle, Eduardo and Neto, Nils Edvin Asp and Leit{\~{a}}o, Sigrid Neumann and Coutinho, Ricardo and Mattoso, Marta and Salomon, Paulo S. and Valle, Rog{\'{e}}rio A. B. and Pereira, Renato Crespo and Kruger, Ricardo Henrique and Thompson, Cristiane and Thompson, Fabiano L.},
doi = {10.1093/database/bav088},
issn = {1758-0463},
journal = {Database},
month = {jan},
title = {{BaMBa: towards the integrated management of Brazilian marine environmental data}},
html = {http://database.oxfordjournals.org/content/2015/bav088.abstract},
pdf = {https://academic.oup.com/database/article-pdf/doi/10.1093/database/bav088/7299083/bav088.pdf},
volume = {2015},
year = {2015}
}
@article{Meirelles2015a,
abbr = {PLOS ONE},
author = {Meirelles, Pedro M. and Amado-Filho, Gilberto M. and Pereira-Filho, Guilherme H. and Pinheiro, Hudson T. and de Moura, Rodrigo L. and Joyeux, Jean-Christophe and Mazzei, Eric F. and Bastos, Alex C. and Edwards, Robert A. and Dinsdale, Elizabeth and Paranhos, Rodolfo and Santos, Eidy O. and Iida, Tetsuya and Gotoh, Kazuyoshi and Nakamura, Shota and Sawabe, Tomoo and Rezende, Carlos E. and Gadelha, Luiz and Francini-Filho, Ronaldo B. and Thompson, Cristiane and Thompson, Fabiano L.},
doi = {10.1371/journal.pone.0130084},
editor = {Chen, Chaolun Allen},
issn = {1932-6203},
journal = {PLOS ONE},
month = {jun},
number = {6},
pages = {e0130084},
title = {{Baseline Assessment of Mesophotic Reefs of the Vit{\'{o}}ria-Trindade Seamount Chain Based on Water Quality, Microbial Diversity, Benthic Cover and Fish Biomass Data}},
html = {http://dx.plos.org/10.1371/journal.pone.0130084},
pdf = {http://www.plosone.org/article/fetchObject.action?uri=info:doi/10.1371/journal.pone.0130084&representation=PDF},
volume = {10},
year = {2015}
}
@incollection{Gadelha2014,
author = {Gadelha, Luiz and Mattoso, Marta},
abbr = {IPAW},
selected = {true},
booktitle = {Provenance and Annotation of Data and Processes. IPAW 2014. Lecture Notes in Computer Science, vol. 8628},
doi = {10.1007/978-3-319-16462-5_11},
isbn = {978-3-319-16461-8},
pages = {139--151},
publisher = {Springer},
title = {{Applying Provenance to Protect Attribution in Distributed Computational Scientific Experiments}},
html = {http://dx.doi.org/10.1007/978-3-319-16462-5_11},
pdf = {https://link.springer.com/content/pdf/10.1007/978-3-319-16462-5_11.pdf},
volume = {8628},
year = {2015}
}
@article{Zorrilla2014,
abbr = {Data Sci. J.},
author = {Zorrilla, R. and Poltosi, M. and Gadelha, L. and Porto, F. and Moura, A. and Dalto, A. and Lavrado, H. P. and Valentin, Y. and Ten{\'{o}}rio, M. and Xavier, E.},
doi = {10.2481/dsj.IFPDA-04},
issn = {1683-1470},
journal = {Data Science Journal},
month = {oct},
pages = {PDA20--PDA26},
title = {{Conceptual View Representation of the Brazilian Information System on Antarctic Environmental Research}},
html = {https://www.jstage.jst.go.jp/article/dsj/13/0/13_IFPDA-04/_article},
pdf = {https://www.jstage.jst.go.jp/article/dsj/13/0/13_IFPDA-04/_pdf},
volume = {13},
year = {2014}
}
@inproceedings{Gadelha2014a,
abbr = {BreSci},
author = {Gadelha, Luiz and Guimar{\~{a}}es, Pedro and Moura, Ana Maria and Drucker, Debora P. and Dalcin, Eduardo and Gall, Guilherme and Tavares, Jurandir and Palazzi, Daniele and Poltosi, Maira and Porto, Fabio and Moura, Francisco and Leo, Wagner Vieira},
booktitle = {VIII Brazilian e-Science Workshop (BRESCI 2014). Proc. XXXIV Congress of the Brazilian Computer Society},
title = {{SiBBr: Uma Infraestrutura para Coleta, Integra{\c{c}}{\~{a}}o e An{\'{a}}lise de Dados sobre a Biodiversidade Brasileira}},
html = {https://sol.sbc.org.br/index.php/bresci/article/view/10477},
pdf = {https://sol.sbc.org.br/index.php/bresci/article/view/10477/10344},
year = {2014}
}
@inproceedings{Gadelha2013,
author = {Gadelha, Luiz and Wilde, Michael and Mattoso, Marta and Foster, Ian},
abbr = {EDBT/ICDT WS},
  booktitle = {Proceedings of the Joint EDBT/ICDT 2013 Workshops (EDBT '13)},
doi = {10.1145/2457317.2457374},
isbn = {9781450315999},
month = {mar},
pages = {325},
publisher = {ACM},
title = {{Provenance traces of the Swift parallel scripting system}},
html = {http://dl.acm.org/citation.cfm?id=2457317.2457374},
year = {2013}
}
@phdthesis{GadelhaJr.2012,
abbr = {PESC-UFRJ},
author = {Gadelha, L.},
school = {Universidade Federal do Rio de Janeiro},
title = {{Ger{\^{e}}ncia de Proveni{\^{e}}ncia em Workflows Cient{\'{i}}ficos Paralelos e Distribu{\'{i}}dos}},
html = {https://www.cos.ufrj.br/index.php/pt-BR/publicacoes-pesquisa/details/15/2331},
pdf = {https://www.cos.ufrj.br/uploadfile/1347903453.pdf},
year = {2012}
}
@inproceedings{Gadelha2012a,
abbr = {ISEI},
author = {Gadelha, L. and Stanzani, S. and Corr{\^{e}}a, P. and Dalcin, E. and Gomes, C. and Sato, L. and Siqueira, M.},
booktitle = {Proc. 8th International Conference on Ecological Informatics (ISEI 2012)},
pages = {222--224},
title = {{Scalable and Provenance-Enabled Scientific Workflows for Predicting Distribution of Species.}},
year = {2012}
}
@article{Gadelha2012,
abbr = {DPD},
selected = {true},
author = {Gadelha, Luiz and Wilde, Michael and Mattoso, Marta and Foster, Ian},
doi = {10.1007/s10619-012-7104-4},
issn = {0926-8782},
journal = {Distributed and Parallel Databases},
month = {aug},
number = {5-6},
pages = {351--370},
title = {{MTCProv: a practical provenance query framework for many-task scientific computing}},
html = {http://link.springer.com/10.1007/s10619-012-7104-4},
volume = {30},
year = {2012}
}
@inproceedings{Gadelha2011a,
abbr = {TaPP},
author = {Gadelha, L. and Mattoso, M. and Wilde, M. and Foster, I.},
booktitle = {3rd USENIX Workshop on the Theory and Practice of Provenance (TaPP 2011)},
title = {{Provenance Query Patterns for Many-Task Scientific Computing}},
html = {http://www.usenix.org/events/tapp11/tech/final_files/GadelhaUpdate.pdf},
year = {2011}
}
@inproceedings{Gadelha2011,
abbr = {HPCDB},
author = {Gadelha, Luiz and Wilde, Michael and Mattoso, Marta and Foster, Ian},
booktitle = {Proc. of the 1st Annual Workshop on High Performance Computing meets Databases - HPCDB '11},
doi = {10.1145/2125636.2125643},
isbn = {9781450311571},
month = {nov},
pages = {17--20},
publisher = {ACM Press},
title = {{Exploring provenance in high performance scientific computing}},
html = {http://dl.acm.org/citation.cfm?id=2125636.2125643},
year = {2011}
}
@article{GadelhaJr.2011,
abbr = {FGCS},
abstract = {The Swift parallel scripting language allows for the specification, execution and analysis of large-scale computations in parallel and distributed environments. It incorporates a data model for recording and querying provenance information. In this article we describe these capabilities and evaluate the interoperability with other systems through the use of the Open Provenance Model. We describe Swift's provenance data model and compare it to the Open Provenance Model. We also describe and evaluate activities performed within the Third Provenance Challenge, which consisted of implementing a specific scientific workflow, capturing and recording provenance information of its execution, performing provenance queries, and exchanging provenance information with other systems. Finally, we propose improvements to both the Open Provenance Model and Swift's provenance system.},
author = {Gadelha, Luiz and Clifford, Ben and Mattoso, Marta and Wilde, Michael and Foster, Ian},
doi = {10.1016/j.future.2010.05.003},
issn = {0167739X},
journal = {Future Generation Computer Systems},
month = {jun},
number = {6},
pages = {775--780},
title = {{Provenance management in Swift}},
html = {http://www.sciencedirect.com/science/article/pii/S0167739X1000083X},
volume = {27},
year = {2011}
}
@inproceedings{Gadelha2010,
abbr = {IPAW},
author = {Gadelha, L. and Mattoso, M. and Wilde, M. and Foster, I.},
booktitle = {Provenance and Annotation of Data and Processes - The Third Provenance and Annotation Workshop (IPAW 2010)},
doi = {10.1007/978-3-642-17819-1_32},
pages = {277--279},
title = {{Towards a Threat Model for Provenance in e-Science}},
html = {http://link.springer.com/chapter/10.1007/978-3-642-17819-1_32},
year = {2010}
}
@techreport{Gadelha2010a,
abbr = {ANL-TR},
author = {Gadelha, L. and Clifford, B. and Mattoso, M. and Wilde, M. and Foster., I.},
  institution = {Argonne National Laboratory},
  number = {ANL/MCS-TM-311},
title = {{Provenance Management in Swift with Implementation Details}},
pdf = {http://www.mcs.anl.gov/papers/TM-311-final.pdf},
year = {2010}
}
@inproceedings{GadelhaJr2008,
abbr = {eScience},
abstract = {Secure provenance techniques are essential in generating trustworthy provenance records, where one is interested in protecting their integrity, confidentiality, and availability. In this work, we suggest an architecture to provide protection of authorship and temporal information in grid-enabled provenance systems. It can be used in the resolution of conflicting intellectual property claims, and in the reliable chronological reconstitution of scientific experiments. We observe that some techniques from public key infrastructures can be readily applied for this purpose. We discuss the issues involved in the implementation of such architecture and describe some experiments realized with the proposed techniques.},
author = {Gadelha, Luiz and Mattoso, Marta},
booktitle = {2008 IEEE Fourth International Conference on eScience},
doi = {10.1109/eScience.2008.161},
isbn = {978-1-4244-3380-3},
month = {dec},
pages = {597--602},
publisher = {IEEE},
title = {{Kairos: An Architecture for Securing Authorship and Temporal Information of Provenance Data in Grid-Enabled Workflow Management Systems}},
html = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4736871},
year = {2008}
}
@inproceedings{Cruz2008,
abbr = {CCGrid},
  abstract = {Monitoring the execution of distributed tasks within the workflow execution is not easy and is frequently controlled manually. This work presents a lightweight middleware monitor to design and control the parallel execution of tasks from a distributed scientific workflow. This middleware can be connected into a workflow management system. This middleware implementation is evaluated with the Kepler workflow management system, by including new modules to control and monitor the distributed execution of the tasks. These middleware modules were added to a bioinformatics workflow to monitor parallel BLAST executions. Results show potential to high performance process execution while preserving the original features of the workflow.},
author = {da Cruz, Sergio Manuel Serra and da Silva, Fabricio Nogueira and Gadelha, Luiz and Cavalcanti, Maria Claudia Reis and Campos, Maria Luiza M. and Mattoso, Marta},
booktitle = {2008 Eighth IEEE International Symposium on Cluster Computing and the Grid (CCGRID)},
doi = {10.1109/CCGRID.2008.89},
month = {may},
pages = {693--698},
publisher = {IEEE},
title = {{A Lightweight Middleware Monitor for Distributed Scientific Workflows}},
html = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4534285},
year = {2008}
}
@inproceedings{Gadelha2007,
abbr = {MGC},
address = {New York, New York, USA},
author = {Gadelha, Luiz and Schulze, Bruno},
booktitle = {Proceedings of the 5th international workshop on Middleware for grid computing held at the ACM/IFIP/USENIX 8th International Middleware Conference - MGC '07},
doi = {10.1145/1376849.1376859},
isbn = {9781595939449},
month = {nov},
pages = {1--6},
publisher = {ACM},
title = {{On the management of grid credentials}},
html = {http://dl.acm.org/citation.cfm?id=1376849.1376859},
year = {2007}
}
@inproceedings{Gadelha2006,
abbr = {WCGA},
author = {Gadelha, L.},
booktitle = {Proc. IV Workshop de Computa{\c{c}}{\~{a}}o em Grid e Aplica{\c{c}}{\~{o}}es (WCGA 2006)},
pages = {155--156},
title = {{Specification of a MyProxy Plugin for Mozilla}},
html = {http://bibliotecadigital.sbc.org.br/download.php?paper=2286},
year = {2006}
}
@inproceedings{Gadelha2001,
abbr = {SCCC},
abstract = {We present an efficient strategy for the application of the inference rules of a completion procedure for finitely presented groups. This procedure has been proposed by Cremanns and Otto and uses a combinatorial structure called word-cycle. Our strategy is complete in the sense that a set of persistent word-cycles can be used to solve the reduced word problem of the finitely presented group given as input. This procedure has been implemented in order to experiment with various examples of groups},
author = {Gadelha, L. and Ayala-Rincon, M.},
booktitle = {SCCC 2001. 21st International Conference of the Chilean Computer Science Society},
doi = {10.1109/SCCC.2001.972635},
isbn = {0-7695-1396-4},
issn = {1522-4902},
pages = {80--85},
publisher = {IEEE Comput. Soc},
title = {{An efficient strategy for word-cycle completion in finitely presented groups}},
html = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=972635},
year = {2001}
}
@article{Gadelha1998,
abbr = {Revista SCCC},
author = {Gadelha, Luiz and Ayala-Rinc{\'{o}}n, Mauricio},
journal = {La Revista de La Sociedad Chilena de Ciencia de La Computaci{\'{o}}n},
number = {1},
pages = {14--23},
title = {{Some Applications of (Semi-)Decision Algorithms for Presburger Arithmetic in Automated Deduction based on Rewriting Techniques}},
volume = {2},
year = {1998}
}
@inproceedings{Gadelha1997,
abbr = {CLEI},
author = {Gadelha, L. and Ayala-Rinc{\'{o}}n, M.},
booktitle = {Proc. XXIII Latin American Conference on Informatics (CLEI)},
title = {{Applications of Decision Algorithms for Presburger Arithmetic in Rewrite Automated Deduction}},
html = {http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=B2552C9F9A4D2998F68207217096A970?doi=10.1.1.51.9473&rep=rep1&type=pdf},
year = {1997}
}