% reference.bib
@INPROCEEDINGS{5655011,
author={{Li Jun} and {Li Ling}},
booktitle={2010 International Conference on Intelligent Computing and Integrated Systems},
title={Comparative research on Python speed optimization strategies},
year={2010},
volume={},
number={},
pages={57-59},
doi={10.1109/ICISS.2010.5655011}
}
@Article{NumpyArray,
author={Harris, Charles R.
and Millman, K. Jarrod
and van der Walt, St{\'e}fan J.
and Gommers, Ralf
and Virtanen, Pauli
and Cournapeau, David
and Wieser, Eric
and Taylor, Julian
and Berg, Sebastian
and Smith, Nathaniel J.
and Kern, Robert
and Picus, Matti
and Hoyer, Stephan
and van Kerkwijk, Marten H.
and Brett, Matthew
and Haldane, Allan
and del R{\'i}o, Jaime Fern{\'a}ndez
and Wiebe, Mark
and Peterson, Pearu
and G{\'e}rard-Marchant, Pierre
and Sheppard, Kevin
and Reddy, Tyler
and Weckesser, Warren
and Abbasi, Hameer
and Gohlke, Christoph
and Oliphant, Travis E.},
title={Array programming with NumPy},
journal={Nature},
year={2020},
month={Sep},
day={01},
volume={585},
number={7825},
pages={357-362},
abstract={Array programming provides a powerful, compact and expressive syntax for accessing, manipulating and operating on data in vectors, matrices and higher-dimensional arrays. NumPy is the primary array programming library for the Python language. It has an essential role in research analysis pipelines in fields as diverse as physics, chemistry, astronomy, geoscience, biology, psychology, materials science, engineering, finance and economics. For example, in astronomy, NumPy was an important part of the software stack used in the discovery of gravitational waves and in the first imaging of a black hole. Here we review how a few fundamental array concepts lead to a simple and powerful programming paradigm for organizing, exploring and analysing scientific data. NumPy is the foundation upon which the scientific Python ecosystem is constructed. It is so pervasive that several projects, targeting audiences with specialized needs, have developed their own NumPy-like interfaces and array objects. Owing to its central position in the ecosystem, NumPy increasingly acts as an interoperability layer between such array computation libraries and, together with its application programming interface (API), provides a flexible framework to support the next decade of scientific and industrial analysis.},
issn={1476-4687},
doi={10.1038/s41586-020-2649-2},
url={https://doi.org/10.1038/s41586-020-2649-2}
}
@misc{wiki:python,
author = "{Wikipedia contributors}",
title = "Python (programming language) --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Python_(programming_language)&oldid=999718546}",
note = "[Online; accessed 11-January-2021]"
}
@misc{Py:cProfile,
author = "{Python Developer team}",
title = "The Python Profilers",
year = "2021",
howpublished = "\url{https://docs.python.org/3/library/profile.html#the-python-profilers}",
note = "[Online; accessed 11-January-2021]"
}
@misc{Py:pickle,
author = "{Python Developer team}",
title = "The Python Profilers",
year = "2021",
howpublished = "\url{https://docs.python.org/3/library/pickle.html}",
note = "[Online; accessed 04-April-2021]"
}
@misc{Py:filter,
author = "{Python Developer team}",
title = "The Python Profilers",
year = "2021",
howpublished = "\url{https://docs.python.org/3/library/functions.html#filter}",
note = "[Online; accessed 11-January-2021]"
}
@misc{Py:map,
author = "{Python Developer team}",
title = "The Python Profilers",
year = "2021",
howpublished = "\url{https://docs.python.org/3/library/functions.html#map}",
note = "[Online; accessed 11-January-2021]"
}
@misc{Py:listComprehension,
author = "{Python Developer team}",
title = "The Python Profilers",
year = "2021",
howpublished = "\url{https://docs.python.org/3/tutorial/datastructures.html#list-comprehensions}",
note = "[Online; accessed 11-January-2021]"
}
@misc{matProfile,
author = "{The MathWorks, Inc.}",
title = "Profile",
year = "2020",
howpublished = "\url{https://nl.mathworks.com/help/matlab/ref/profile.html}",
note = "[Online; accessed 11-January-2021]"
}
@misc{wiki:HashTable,
author = "{Wikipedia contributors}",
title = "Hash table --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Hash_table&oldid=999105548}",
note = "[Online; accessed 11-January-2021]"
}
@misc{matEval,
author = "{The MathWorks, Inc.}",
title = "Alternatives to the eval Function",
year = "2020",
howpublished = "\url{https://nl.mathworks.com/help/matlab/matlab_prog/string-evaluation.html}",
note = "[Online; accessed 11-January-2021]"
}
@misc{wiki:SVM,
author = "{Wikipedia contributors}",
title = "Support-vector machine --- {Wikipedia}{,} The Free Encyclopedia",
year = "2020",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Support-vector_machine&oldid=997327362}",
note = "[Online; accessed 11-January-2021]"
}
@Article{Cortes1995,
author={Cortes, Corinna
and Vapnik, Vladimir},
title={Support-vector networks},
journal={Machine Learning},
year={1995},
month={Sep},
day={01},
volume={20},
number={3},
pages={273-297},
abstract={The support-vector network is a new learning machine for two-group classification problems. The machine conceptually implements the following idea: input vectors are non-linearly mapped to a very high-dimension feature space. In this feature space a linear decision surface is constructed. Special properties of the decision surface ensures high generalization ability of the learning machine. The idea behind the support-vector network was previously implemented for the restricted case where the training data can be separated without errors. We here extend this result to non-separable training data.},
issn={1573-0565},
doi={10.1007/BF00994018},
url={https://doi.org/10.1007/BF00994018}
}
@Inbook{pythonBook,
title="Advanced Python",
bookTitle="Python Scripting for Computational Science",
year="2006",
publisher="Springer Berlin Heidelberg",
address="Berlin, Heidelberg",
pages="313--441",
isbn="978-3-540-31269-7",
doi="10.1007/3-540-31269-2_8",
url="https://doi.org/10.1007/3-540-31269-2_8"
}
@misc{WhyMatlab,
author = "{The MathWorks, Inc.}",
title = "MATLAB vs. Python: Top Reasons to Choose MATLAB",
year = "2021",
howpublished = "\url{https://nl.mathworks.com/products/matlab/matlab-vs-python.html}",
note = "[Online; accessed 12-January-2021]"
}
@article{ARUOBA2015265,
title = "A comparison of programming languages in macroeconomics",
journal = "Journal of Economic Dynamics and Control",
volume = "58",
pages = "265 - 273",
year = "2015",
issn = "0165-1889",
doi = "https://doi.org/10.1016/j.jedc.2015.05.009",
url = "http://www.sciencedirect.com/science/article/pii/S0165188915000883",
author = "S. Borağan Aruoba and Jesús Fernández-Villaverde",
keywords = "Dynamic equilibrium economies, Computational methods, Programming languages",
abstract = "We solve the stochastic neoclassical growth model, the workhorse of modern macroeconomics, using C++14, Fortran 2008, Java, Julia, Python, Matlab, Mathematica, and R. We implement the same algorithm, value function iteration, in each of the languages. We report the execution times of the codes in a Mac and in a Windows computer and briefly comment on the strengths and weaknesses of each language."
}
@INPROCEEDINGS{EMMatVsPy,
author={A. {Weiss} and A. {Elsherbeni}},
booktitle={2020 International Applied Computational Electromagnetics Society Symposium (ACES)},
title={Computational Performance of MATLAB and Python for Electromagnetic Applications},
year={2020},
volume={},
number={},
pages={1-2},
doi={10.23919/ACES49320.2020.9196078}
}
@misc{Numpy:Vectorization,
title = "Array Broadcasting in Numpy",
year = "2020",
howpublished = "\url{https://numpy.org/doc/stable/user/theory.broadcasting.html}",
note = "[Online; accessed 13-January-2021]"
}
@misc{Numpy:OpenBLAS,
title = "NumPy packages \& accelerated linear algebra libraries",
year = "2020",
howpublished = "\url{https://numpy.org/install/}",
note = "[Online; accessed 01-April-2021]"
}
@misc{Numpy:Fast,
title = "Why is NumPy Fast?",
year = "2020",
howpublished = "\url{https://numpy.org/doc/stable/user/whatisnumpy.html#why-is-numpy-fast}",
note = "[Online; accessed 10-April-2021]"
}
@misc{wiki:LoopUnroll,
author = "{Wikipedia contributors}",
title = "Loop unrolling --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Loop_unrolling&oldid=997858676}",
note = "[Online; accessed 13-January-2021]"
}
@misc{agner:LoopUnroll,
author = {Fog, Agner},
title = "Optimizing subroutines in assembly language",
year = "2021",
howpublished = "\url{https://www.agner.org/optimize/optimizing_assembly.pdf}",
note = "[Online; accessed 27-April-2021]"
}
@book{oreilly,
author={Gorelick, Micha and Ozsvald, Ian},
year={2020},
title={High Performance Python, 2nd Edition},
publisher={O'Reilly Media, Inc},
address={S.l.},
edition={2},
abstract={Your Python code may run correctly, but you need it to run faster. Updated for Python 3, this expanded edition shows you how to locate performance bottlenecks and significantly speed up your code in high-data-volume programs. By exploring the fundamental theory behind design choices, High Performance Python helps you gain a deeper understanding of Python’s implementation. How do you take advantage of multicore architectures or clusters? Or build a system that scales up and down without losing reliability? Experienced Python programmers will learn concrete solutions to many issues, along with war stories from companies that use high-performance Python for social media analytics, productionized machine learning, and more. Get a better grasp of NumPy, Cython, and profilers. Learn how Python abstracts the underlying computer architecture. Use profiling to find bottlenecks in CPU time and memory usage. Write efficient programs by choosing appropriate data structures. Speed up matrix and vector computations. Use tools to compile Python down to machine code. Manage multiple I/O and computational operations concurrently. Convert multiprocessing code to run on local or remote clusters. Deploy code faster using tools like Docker.},
keywords={CoreOs; Python},
isbn={9781492055020},
language={English},
chapter = 4,
}
@inbook{oreillyCh4,
author={Gorelick, Micha and Ozsvald, Ian},
year={2020},
title={High Performance Python, 2nd Edition},
publisher={O'Reilly Media, Inc},
address={S.l.},
edition={2},
keywords={CoreOs; Python},
isbn={9781492055020},
language={English},
chapter={Chapter 4. Dictionaries and Sets},
}
@inbook{oreillyCh10,
author={Gorelick, Micha and Ozsvald, Ian},
year={2020},
title={High Performance Python, 2nd Edition},
publisher={O'Reilly Media, Inc},
address={S.l.},
edition={2},
keywords={CoreOs; Python},
isbn={9781492055020},
language={English},
chapter={Chapter 10. Clusters and Job Queues},
}
@Inbook{Silaparasetty2020,
author="Silaparasetty, Nikita",
title="Machine Learning With Python",
bookTitle="Machine Learning Concepts with Python and the Jupyter Notebook Environment: Using Tensorflow 2.0",
year="2020",
publisher="Apress",
address="Berkeley, CA",
pages="67--87",
abstract="In previous chapters, we saw what artificial intelligence is and how machine learning and deep learning techniques are used to train machines to become smart. In these next few chapters, we will learn how machines are trained to take data, process it, analyze it, and develop inferences from it.",
isbn="978-1-4842-5967-2",
doi="10.1007/978-1-4842-5967-2_5",
url="https://doi.org/10.1007/978-1-4842-5967-2_5",
chapter={5. Machine Learning With Python}
}
@misc{PythonClusterFrameworks,
title = "Parallel Processing and Multiprocessing in Python",
year = "2020",
howpublished = "\url{https://wiki.python.org/moin/ParallelProcessing}",
note = "[Online; accessed 20-January-2021]"
}
@misc{MNEVersusFielder,
title = "Background",
year = "2021",
howpublished = "\url{https://www.fieldtriptoolbox.org/development/project/integrate_with_mne/#background}",
note = "[Online; accessed 11-April-2021]"
}
@misc{wiki:parallelComputing,
author = "{Wikipedia contributors}",
title = "Parallel computing --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Parallel_computing&oldid=1000667608}",
note = "[Online; accessed 20-January-2021]"
}
@misc{wiki:distributedComputing,
author = "{Wikipedia contributors}",
title = "Distributed computing --- {Wikipedia}{,} The Free Encyclopedia",
year = "2020",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Distributed_computing&oldid=991259366}",
note = "[Online; accessed 20-January-2021]"
}
@misc{exax:Accelerator,
title = "About",
year="2016",
howpublished = "\url{https://expertmakeraccelerator.org/about/}",
note = "[Online; accessed 10-February-2021]"
}
@misc{enwiki:crossvalidation,
author = "{Wikipedia contributors}",
title = "Cross-validation (statistics) --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Cross-validation_(statistics)&oldid=1007001562}",
note = "[Online; accessed 17-February-2021]"
}
@misc{ray:delayget,
author = "{The Ray Team}",
title = "Tips for first-time users",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/auto_examples/tips-for-first-time.html#tip-1-delay-ray-get}",
note = "[Online; accessed 01-March-2021]"
}
@misc{enwiki:avx,
author = "{Wikipedia contributors}",
title = "Advanced Vector Extensions --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Advanced_Vector_Extensions&oldid=1010378804}",
note = "[Online; accessed 8-March-2021]"
}
@misc{AgnerFog:avx,
author = {Fog, Agner},
title = "The microarchitecture of Intel, AMD,and VIA CPUs",
year = "2021",
howpublished = "\url{https://www.agner.org/optimize/microarchitecture.pdf}",
note = "[Online; accessed 27-April-2021]"
}
@misc{Intel:avx,
author = {Lomont, Chris},
title = "Introduction to Intel® Advanced Vector Extensions",
year = "2021",
howpublished = "\url{https://software.intel.com/content/www/us/en/develop/articles/introduction-to-intel-advanced-vector-extensions.html}",
note = "[Online; accessed 27-April-2021]"
}
@ARTICLE{10.3389/fnins.2010.00198,
AUTHOR={Blankertz, Benjamin and Tangermann, Michael and Vidaurre, Carmen and Fazli, Siamac and Sannelli, Claudia and Haufe, Stefan and Maeder, Cecilia and Ramsey, Lenny and Sturm, Irene and Curio, Gabriel and M{\"u}ller, Klaus-Robert},
TITLE={The Berlin Brain–Computer Interface: Non-Medical Uses of BCI Technology},
JOURNAL={Frontiers in Neuroscience},
VOLUME={4},
PAGES={198},
YEAR={2010},
URL={https://www.frontiersin.org/article/10.3389/fnins.2010.00198},
DOI={10.3389/fnins.2010.00198},
ISSN={1662-453X},
ABSTRACT={Brain–computer interfacing (BCI) is a steadily growing area of research. While initially BCI research was focused on applications for paralyzed patients, increasingly more alternative applications in healthy human subjects are proposed and investigated. In particular, monitoring of mental states and decoding of covert user states have seen a strong rise of interest. Here, we present some examples of such novel applications which provide evidence for the promising potential of BCI technology for non-medical uses. Furthermore, we discuss distinct methodological improvements required to bring non-medical applications of BCI technology to a diversity of layperson target groups, e.g., ease of use, minimal training, general usability, short control latencies.}
}
@misc{nishihara2017realtime,
title={Real-Time Machine Learning: The Missing Pieces},
author={Robert Nishihara and Philipp Moritz and Stephanie Wang and Alexey Tumanov and William Paul and Johann Schleier-Smith and Richard Liaw and Mehrdad Niknami and Michael I. Jordan and Ion Stoica},
year={2017},
eprint={1703.03924},
archivePrefix={arXiv},
primaryClass={cs.DC}
}
@misc{moritz2018ray,
title={Ray: A Distributed Framework for Emerging AI Applications},
author={Philipp Moritz and Robert Nishihara and Stephanie Wang and Alexey Tumanov and Richard Liaw and Eric Liang and Melih Elibol and Zongheng Yang and William Paul and Michael I. Jordan and Ion Stoica},
year={2018},
eprint={1712.05889},
archivePrefix={arXiv},
primaryClass={cs.DC}
}
@misc{ray:whatIsRay,
author ="{The Ray Team}",
title = "A Gentle Introduction to Ray",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/ray-overview/index.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:RaySGD,
author = "{The Ray Team}",
title = "RaySGD: Distributed Training Wrappers",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/raysgd/raysgd.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:Rayrllib,
author = "{The Ray Team}",
title = "RLlib: Scalable Reinforcement Learning",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/rllib.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:RayTune,
author = "{The Ray Team}",
title = "Tune: Scalable Hyperparameter Tuning",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/tune/index.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:RaySklearn,
author = "{The Ray Team}",
title = "Distributed Scikit-learn / Joblib",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/joblib.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:DaskOnRay,
author = "{The Ray Team}",
title = "Dask on Ray",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/dask-on-ray.html}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:SystemDesign,
author = "{The Ray Team}",
title = "System design goals",
year = "2020",
howpublished = "\url{https://docs.google.com/document/d/1lAy0Owi-vPz2jEqBSaHNQcy2IBSDEHyXNOQZlGuj93c/preview#}",
pages = "4--4",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:Architecture,
author = "{The Ray Team}",
title = "Architecture Overview",
year = "2020",
howpublished = "\url{https://docs.google.com/document/d/1lAy0Owi-vPz2jEqBSaHNQcy2IBSDEHyXNOQZlGuj93c/preview#}",
pages = "5--8",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:GCS,
author = "{The Ray Team}",
title = "Global Control Store",
year = "2020",
howpublished = "\url{https://docs.google.com/document/d/1lAy0Owi-vPz2jEqBSaHNQcy2IBSDEHyXNOQZlGuj93c/preview#}",
pages = "31--33",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:Resources,
author = "{The Ray Team}",
title = "ray.remote",
year = "2021",
howpublished = "\url{https://docs.ray.io/en/master/package-ref.html#ray-remote}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:PlacementGroup,
author = "{The Ray Team}",
title = "Placement Groups",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/package-ref.html#placement-group}",
note = "[Online; accessed 26-March-2021]"
}
@misc{ray:remoteFunctions,
author = "{The Ray Team}",
title = "Remote functions (Tasks)",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/walkthrough.html#remote-functions-tasks}",
note = "[Online; accessed 29-March-2021]"
}
@misc{ray:remoteObjects,
author = "{The Ray Team}",
title = "Objects in ray",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/walkthrough.html#objects-in-ray}",
note = "[Online; accessed 29-March-2021]"
}
@misc{ray:FetchingResults,
author = "{The Ray Team}",
title = "Fetching Results",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/walkthrough.html#fetching-results}",
note = "[Online; accessed 29-March-2021]"
}
@misc{ray:remoteClasses,
author = "{The Ray Team}",
title = "Remote Classes (Actors)",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/walkthrough.html#remote-classes-actors}",
note = "[Online; accessed 29-March-2021]"
}
@misc{ray:rayCluster,
author = "{The Ray Team}",
title = "Ray Cluster Overview",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/cluster/index.html}",
note = "[Online; accessed 29-March-2021]"
}
@misc{ray:OMPNUMTHREAD,
author = "{The Ray Team}",
title = "Cluster Resources",
year = "2020",
howpublished = "\url{https://docs.ray.io/en/master/configure.html#cluster-resources}",
note = "[Online; accessed 1-April-2021]"
}
@inbook{exax:sourceCode,
author={Berkeman, Anders and Drougge, Carl and H{\"o}rberg, Sofia},
year={2020},
title={ExAx: The Accelerator},
keywords={Accelerator; exax},
howpublished="\url{https://expertmakeraccelerator.org/pdf/acc_manual.pdf}",
language={English},
chapter={2.2.2 Jobs Can Only be Run Once},
note = "[Online; accessed 29-March-2021]"
}
@inbook{exax:parallelExecution,
author={Berkeman, Anders and Drougge, Carl and H{\"o}rberg, Sofia},
year={2020},
title={ExAx: The Accelerator},
keywords={Accelerator; exax},
howpublished="\url{https://expertmakeraccelerator.org/pdf/acc_manual.pdf}",
language={English},
chapter={2.4.2 Parallel Execution},
note = "[Online; accessed 29-March-2021]"
}
@misc{AA:apacheArrow,
author = "{The Apache Software Foundation}",
title = "Apache Arrow",
year = "2021",
howpublished = "\url{https://arrow.apache.org/}",
note = "[Online; accessed 30-March-2021]"
}
@misc{AA:Overview,
author = "{The Apache Software Foundation}",
title = "Columnar is Fast",
year = "2021",
howpublished = "\url{https://arrow.apache.org/overview/}",
note = "[Online; accessed 30-March-2021]"
}
@misc{enwiki:MISD,
author = "{Wikipedia contributors}",
title = "SIMD --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=SIMD&oldid=1007613409}",
note = "[Online; accessed 30-March-2021]"
}
@misc{GCC:MISD,
author = "{GCC team}",
title = "6.52 Using Vector Instructions through Built-in Functions",
year = "2021",
howpublished = "\url{https://gcc.gnu.org/onlinedocs/gcc/Vector-Extensions.html}",
note = "[Online; accessed 27-April-2021]"
}
@misc{Intel:MISD,
author = {du Bois, Marissa and Brubaker, Pete and Milano, Dominic},
title = "Single Instruction Multiple Data Made Easy with Intel® Implicit SPMD Program Compiler",
year = "2019",
howpublished = "\url{https://software.intel.com/content/www/us/en/develop/articles/simd-made-easy-with-intel-ispc.html}",
note = "[Online; accessed 30-March-2021]"
}
@misc{enwiki:columnarData,
author = "{Wikipedia contributors}",
title = "Column-oriented DBMS --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Column-oriented_DBMS&oldid=1005533076}",
note = "[Online; accessed 30-March-2021]"
}
@misc{AWS:columnarData,
author = "{Amazon Web Services, Inc.}",
title = "What is a Columnar Database?",
year = "2021",
howpublished = "\url{https://aws.amazon.com/nosql/columnar/}",
note = "[Online; accessed 27-April-2021]"
}
@inbook{DiskSlow,
author={Stallings, William},
year={2012},
title={Operating systems: internals and design principles},
publisher={Pearson},
address={Boston},
edition={7th international},
keywords={Operating systems},
isbn={9780132309981},
chapter={1.4 INTERRUPTS},
language={English},
}
@INPROCEEDINGS{1607248,
author={P. {Ratanaworabhan} and {Jian Ke} and M. {Burtscher}},
booktitle={Data Compression Conference (DCC'06)},
title={Fast lossless compression of scientific floating-point data},
year={2006},
volume={},
number={},
pages={133-142},
doi={10.1109/DCC.2006.35}
}
@inbook{ComputerMemoryHierarchy,
author={Hennessy, John L. and Patterson, David A. and Asanović, Krste},
year={2012},
title={Computer architecture: a quantitative approach},
publisher={Morgan Kaufmann/Elsevier},
address={Waltham, MA},
edition={5th},
abstract={Computer Architecture: A Quantitative Approach explores the ways that software and technology in the cloud are accessed by digital media, such as cell phones, computers, tablets, and other mobile devices. The book became a part of Intel's 2012 recommended reading list for developers, and it covers the revolution of mobile computing. The text also highlights the two most important factors in architecture today: parallelism and memory hierarchy. The six chapters that this book is composed of follow a consistent framework: explanation of the ideas in each chapter; a "crosscutting issues" section, which presents how the concepts covered in one chapter connect with those given in other chapters; a "putting it all together" section that links these concepts by discussing how they are applied in real machines; and detailed examples of misunderstandings and architectural traps commonly encountered by developers and architects. The first chapter of the book includes formulas for energy, static and dynamic power, integrated circuit costs, reliability, and availability. Chapter 2 discusses memory hierarchy and includes discussions about virtual machines, SRAM and DRAM technologies, and new material on Flash memory. The third chapter covers the exploitation of instruction-level parallelism in high-performance processors, superscalar execution, dynamic scheduling and multithreading, followed by an introduction to vector architectures in the fourth chapter. Chapters 5 and 6 describe multicore processors and warehouse-scale computers (WSCs), respectively. This book is an important reference for computer architects, programmers, application developers, compiler and system software developers, and computer system designers. Part of Intel's 2012 Recommended Reading List for Developers. Updated to cover the mobile computing revolution. Emphasizes the two most important topics in architecture today: memory hierarchy and parallelism in all its forms. Develops common themes throughout each chapter: power, performance, cost, dependability, protection, programming models, and emerging trends ("What's Next"). Includes three review appendices in the printed text; additional reference appendices are available online. Includes updated Case Studies and completely new exercises.},
keywords={Computer architecture},
isbn={9780123838728},
chapter={Chapter 2: Memory Hierarchy Design},
language={English},
}
@misc{enwiki:memoryHierarchy,
author = "{Wikipedia contributors}",
title = "Memory hierarchy --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Memory_hierarchy&oldid=1002366093}",
note = "[Online; accessed 30-March-2021]"
}
@article{cFaster,
author={Prechelt, L.},
year={2000},
title={An empirical comparison of seven programming languages},
journal={Computer},
volume={33},
number={10},
pages={23-29},
abstract={Often heated, debates regarding different programming languages' effectiveness remain inconclusive because of scarce data and a lack of direct comparisons. The author addresses that challenge, comparatively analyzing 80 implementations of the phone-code program in seven different languages (C, C++, Java, Perl, Python, Rexx and Tcl). Further, for each language, the author analyzes several separate implementations by different programmers. The comparison investigates several aspects of each language, including program length, programming effort, runtime efficiency, memory consumption, and reliability. The author uses comparisons to present insight into program language performance.},
keywords={Computer languages; Java; Runtime; Program processors; Production; Read-write memory; Workstations; Sun; Statistics; Programming profession; Usage; Programming languages},
issn={0018-9162},
language={English},
}
@misc{enwiki:AutoParallelization,
author = "{Wikipedia contributors}",
title = "Automatic parallelization --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Automatic_parallelization&oldid=1008941284}",
note = "[Online; accessed 30-March-2021]"
}
@misc{enwiki:parallelization,
author = "{Wikipedia contributors}",
title = "Parallel computing --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Parallel_computing&oldid=1008901597}",
note = "[Online; accessed 30-March-2021]"
}
@misc{mat:eval,
author = "{The MathWorks, Inc.}",
title = "eval",
year = "2021",
howpublished = "\url{https://www.mathworks.com/help/matlab/ref/eval.html}",
note = "[Online; accessed 31-March-2021]"
}
@misc{OpenBLAS:threadSafe,
author = "{seberg}",
title = "OpenBLAS threadsafety issues for downstream libraries (NumPy)",
year = "2021",
howpublished = "\url{https://github.com/xianyi/OpenBLAS/issues/1844}",
note = "[Online; accessed 1-April-2021]"
}
@misc{enwiki:1015447262,
author = "{Wikipedia contributors}",
title = "Disk buffer --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Disk_buffer&oldid=1015447262}",
note = "[Online; accessed 10-April-2021]"
}
@misc{diskbuffer,
author = "{The kernel development community}",
title = "Explicit volatile write back cache control",
year = "2021",
howpublished = "\url{https://www.kernel.org/doc/Documentation/block/writeback_cache_control.txt}",
note = "[Online; accessed 27-April-2021]"
}
@inbook{stackFrame,
author={Miller, Bradley N. and Ranum, David L.},
year={2006},
title={Problem solving with algorithms and data structures using Python},
publisher={Franklin, Beedle & Associates},
address={Wilsonville, Or},
keywords={Programming languages; Python (Computer program language); Computer algorithms; Data structures (Computer science)},
isbn={9781590280539},
chapter={5.6. Stack Frames: Implementing Recursion},
language={English},
}
@misc{enwiki:1011877534,
author = "{Wikipedia contributors}",
title = "Profiling (computer programming) --- {Wikipedia}{,} The Free Encyclopedia",
year = "2021",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Profiling_(computer_programming)&oldid=1011877534}",
note = "[Online; accessed 11-April-2021]"
}
@misc{Mat73HDF5,
author = "{The MathWorks, Inc.}",
title = "MAT-File Versions",
year = "2021",
howpublished = "\url{https://www.mathworks.com/help/matlab/import_export/mat-file-versions.html}",
note = "[Online; accessed 12-April-2021]"
}
@misc{WhatIsHDF5,
author = "{The HDF Group}",
title = "HDF5",
year = "2006",
howpublished = "\url{https://www.hdfgroup.org/solutions/hdf5/}",
note = "[Online; accessed 12-April-2021]"
}
@misc{SVMSVC,
author = "{scikit-learn developers}",
title = "sklearn.svm.SVC",
year = "2020",
howpublished = "\url{https://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html?highlight=svc#sklearn-svm-svc}",
note = "[Online; accessed 12-April-2021]"
}
@misc{LinearSVC,
author = "{scikit-learn developers}",
title = "sklearn.svm.LinearSVC",
year = "2020",
howpublished = "\url{https://scikit-learn.org/stable/modules/generated/sklearn.svm.LinearSVC.html?highlight=svc#sklearn.svm.LinearSVC}",
note = "[Online; accessed 12-April-2021]"
}
@Article{Oostenveld2010,
author={Oostenveld, Robert
and Fries, Pascal
and Maris, Eric
and Schoffelen, Jan-Mathijs},
title={FieldTrip: Open Source Software for Advanced Analysis of MEG, EEG, and Invasive Electrophysiological Data},
journal={Computational Intelligence and Neuroscience},
year={2010},
month={Dec},
day={23},
publisher={Hindawi Publishing Corporation},
volume={2011},
pages={156869},
abstract={This paper describes FieldTrip, an open source software package that we developed for the analysis of MEG, EEG, and other electrophysiological data. The software is implemented as a MATLAB toolbox and includes a complete set of consistent and user-friendly high-level functions that allow experimental neuroscientists to analyze experimental data. It includes algorithms for simple and advanced analysis, such as time-frequency analysis using multitapers, source reconstruction using dipoles, distributed sources and beamformers, connectivity analysis, and nonparametric statistical permutation tests at the channel and source level. The implementation as toolbox allows the user to perform elaborate and structured analyses of large data sets using the MATLAB command line and batch scripting. Furthermore, users and developers can easily extend the functionality and implement new algorithms. The modular design facilitates the reuse in other software packages.},
issn={1687-5265},
doi={10.1155/2011/156869},
url={https://doi.org/10.1155/2011/156869}
}
@article{superlinearSPEED,
author={Ristov, Sasko and Prodan, Radu and Gusev, Marjan and Skala, Karolj},
year={2016},
title={Superlinear Speedup in HPC Systems: why and when?},
journal={Annals of Computer Science and Information Systems},
volume={8},
pages={889-898},
issn={2300-5963},
language={English},
}
@article{BramoENEURO.0251-18.2018,
author = {Bram{\~a}o, In{\^e}s and Johansson, Mikael},
title = {Neural Pattern Classification Tracks Transfer-Appropriate Processing in Episodic Memory},
volume = {5},
number = {4},
elocation-id = {ENEURO.0251-18.2018},
year = {2018},
doi = {10.1523/ENEURO.0251-18.2018},
publisher = {Society for Neuroscience},
abstract = {The transfer-appropriate processing (TAP) account holds that episodic memory depends on the overlap between encoding and retrieval processing. In the current study, we employed multivariate pattern analysis (MVPA) of electroencephalography to examine the relevance of spontaneously engaged visual processing during encoding for later retrieval. Human participants encoded word-picture associations, where the picture could be a famous face, a landmark, or an object. At test, we manipulated the retrieval demands by asking participants to retrieve either visual or verbal information about the pictures. MVPA revealed classification between picture categories during early perceptual stages of encoding (\~{}170 ms). Importantly, these visual category-specific neural patterns were predictive of later episodic remembering, but the direction of the relationship was contingent on the particular retrieval demand of the memory task: a benefit for the visual and a cost for the verbal. A reinstatement of the category-specific neural patterns established during encoding was observed during retrieval, and again the relationship with behavior varied with retrieval demands. Reactivation of visual representations during retrieval was associated with better memory in the visual task, but with lower performance in the verbal task. Our findings support and extend the TAP account by demonstrating that processing of particular aspects during memory formation can also have detrimental effects on later episodic remembering when other aspects of the event are called-for and shed new light on encoding and retrieval interactions in episodic memory.},
URL = {https://www.eneuro.org/content/5/4/ENEURO.0251-18.2018},
eprint = {https://www.eneuro.org/content/5/4/ENEURO.0251-18.2018.full.pdf},
journal = {eNeuro}
}