@article {1299, title = {Using Open Source, Open Data, and Civic Technology to Address the COVID-19 Pandemic and Infodemic.}, journal = {Yearb Med Inform}, year = {2021}, month = {2021 Apr 21}, abstract = {

OBJECTIVES: The emerging COVID-19 pandemic has caused one of the world{\textquoteright}s worst health disasters, compounded by social confusion arising from misinformation, the so-called "Infodemic". In this paper, we discuss how open technology approaches - including data sharing, visualization, and tooling - can address the COVID-19 pandemic and infodemic.

METHODS: In response to the call for participation in the 2020 International Medical Informatics Association (IMIA) Yearbook theme issue on Medical Informatics and the Pandemic, the IMIA Open Source Working Group surveyed recent works related to the use of Free/Libre/Open Source Software (FLOSS) for this pandemic.

RESULTS: FLOSS health care projects, including GNU Health, OpenMRS, DHIS2, and others, have responded since the early phase of this pandemic. Data related to COVID-19 have been published by health organizations all over the world. Civic Technology and the collaborative work of FLOSS and open data groups were considered to support collective intelligence on approaches to managing the pandemic.

CONCLUSION: FLOSS and open data have been effectively used to contribute to managing the COVID-19 pandemic, and open approaches to collaboration can improve trust in data.

}, issn = {2364-0502}, doi = {10.1055/s-0041-1726488}, author = {Kobayashi, Shinji and Falc{\'o}n, Luis and Fraser, Hamish and Braa, J{\o}rn and Amarakoon, Pamod and Marcelo, Alvin and Paton, Chris} } @article {1220, title = {CODE STROKE ALERT-Concept and Development of a Novel Open-Source Platform to Streamline Acute Stroke Management.}, journal = {Front Neurol}, volume = {10}, year = {2019}, month = {2019}, pages = {725}, abstract = {

Effective, time-critical intervention in acute stroke is crucial to mitigating mortality and morbidity, but delivery of reperfusion treatments is often hampered by pre-, in-, or inter-hospital system-level delays. Disjointed, repetitive, and inefficient communication is a consistent contributor to avoidable treatment delay. In the era of rapid reperfusion therapy for ischemic stroke, there is a need for a communication system to synchronize the flow of clinical information across the entire stroke journey. A multi-disciplinary development team designed an electronic communications platform, integrated between web browsers and a mobile application, to link all relevant members of the stroke treatment pathway. The platform uses tiered notifications and geotagging, incorporates multiple clinical score calculators, and is compliant with security regulations. The system safely saves relevant information for audit and research. Code Stroke Alert is a platform that can be accessed by emergency medical services (EMS) and hospital staff, coordinating the flow of information during acute stroke care and reducing duplication and error in clinical information handover. Electronic data logs provide an auditable trail of relevant metrics, facilitating quality improvement and research. Code Stroke Alert will be freely available to health networks globally. The open-source nature of the software offers valuable potential for future development of plug-ins and add-ons based on individual institutional needs. Prospective multi-site implementation and measurement of clinical impact are underway.

}, issn = {1664-2295}, doi = {10.3389/fneur.2019.00725}, author = {Seah, Huey Ming and Burney, Moe and Phan, Michael and Shell, Daniel and Wu, Jamin and Zhou, Kevin and Brooks, Owen and Coulton, Bronwyn and Maingard, Julian and Tang, Jennifer and Yazdabadi, Gohar and Tahayori, Bahman and Barras, Christen and Kok, Hong Kuan and Chandra, Ronil and Thijs, Vincent and Brooks, Duncan Mark and Asadi, Hamed} } @article {1226, title = {Comparison of Open-Source Electronic Health Record Systems Based on Functional and User Performance Criteria.}, journal = {Healthc Inform Res}, volume = {25}, year = {2019}, month = {2019 Apr}, pages = {89-98}, abstract = {

Objectives: Open-source Electronic Health Record (EHR) systems have gained importance. The main aim of our research is to guide organizational choice by comparing the features, functionality, and user-facing system performance of the five most popular open-source EHR systems.

Methods: We performed qualitative content analysis with a directed approach on recently published literature (2012-2017) to develop an integrated set of criteria to compare the EHR systems. The functional criteria are an integration of the literature, meaningful use criteria, and the Institute of Medicine{\textquoteright}s functional requirements of EHR, whereas the user-facing system performance is based on the time required to perform basic tasks within the EHR system.

Results: Based on the Alexa web ranking and Google Trends, the five most popular EHR systems at the time of our study were OSEHRA VistA, GNU Health, the Open Medical Record System (OpenMRS), Open Electronic Medical Record (OpenEMR), and OpenEHR. We also identified trends in the popularity of the EHR systems and the locations where each was more popular than the others. OpenEMR met all 32 functional criteria, OSEHRA VistA met 28, OpenMRS met 12 fully and 11 partially, OpenEHR-based EHR met 10 fully and 3 partially, and GNU Health met the fewest, with only 10 criteria met fully and 2 partially.

Conclusions: Based on our functional criteria, OpenEMR is the most promising EHR system, closely followed by VistA. With regard to user-facing system performance, OpenMRS outperforms OpenEMR.

}, issn = {2093-3681}, doi = {10.4258/hir.2019.25.2.89}, author = {Purkayastha, Saptarshi and Allam, Roshini and Maity, Pallavi and Gichoya, Judy W} } @article {1213, title = {Governance and Sustainability of an Open Source Electronic Health Record: An Interpretive Case Study of OpenDolphin in Japan.}, journal = {Stud Health Technol Inform}, volume = {264}, year = {2019}, month = {2019 Aug 21}, pages = {739-743}, abstract = {

Electronic Health Records (EHRs) are at the heart of reforms aimed at improving the efficiency and quality of citizens{\textquoteright} healthcare services. Although there is still some skepticism, open source (OS) EHR is a growing phenomenon in health informatics. Given the widespread adoption of OS software (OSS) in several domains, including operating systems and enterprise systems, the repeated shortfalls faced by healthcare organizations with dominant proprietary EHRs create an opportunity for alternatives such as OSS to demonstrate their ability to address these well-documented problems, including inflexibility, high costs, and low interoperability. However, scholars have expressed extensive concerns about the sustainability of OS EHR. Recognizing that the sustainability of OSS projects relies on their governance arrangements, this case study reports on the evolution of the governance and sustainability of a Japanese OS EHR project and provides rich insights for other open source EHR initiative stakeholders, including physicians, developers, researchers, and policymakers.

}, issn = {1879-8365}, doi = {10.3233/SHTI190321}, author = {Poba-Nzaou, Placide and Kume, Naoto and Kobayashi, Shinji} } @article {1271, title = {The impact of PEPFAR transition on HIV service delivery at health facilities in Uganda.}, journal = {PLoS One}, volume = {14}, year = {2019}, month = {2019}, pages = {e0223426}, abstract = {

BACKGROUND: Since 2004, the President{\textquoteright}s Emergency Plan for AIDS Relief (PEPFAR) has played a large role in Uganda{\textquoteright}s HIV/AIDS response. To better target resources to high-burden regions and facilities, PEPFAR planned to withdraw from 29\% of previously supported health facilities in Uganda between 2015 and 2017.

METHODS: We conducted a cross-sectional survey of 226 PEPFAR-supported health facilities in Uganda in mid-2017. The survey gathered information on availability, perceived quality, and access to HIV services before and after transition. We compare responses for facilities transitioned to those maintained on PEPFAR, accounting for survey design. We also extracted data from DHIS2 for the period October 2013-December 2017 on the number of HIV tests and counseling (HTC), number of patients on antiretroviral therapy (Current on ART), and retention on first-line ART (Retention) at 12 months. Using mixed effect models, we compare trends in service volume around the transition period.

RESULTS: There were 206 facilities that reported transition and 20 that reported maintenance on PEPFAR. Some facilities reporting transition may have been in a gap between implementing partners. The median transition date was September 2016, nine months prior to the survey. Transition facilities were more likely to discontinue HIV outreach following transition (51.6\% vs. 1.4\%, p<0.001) and to report declines in HIV care access (43.5\% vs. 3.1\%, p<0.001) and quality (35.6\% vs. 0\%, p<0.001). However, transition facilities did not differ in their trends in HIV service volume relative to maintenance facilities.

CONCLUSIONS: Transition from PEPFAR resulted in facilities reporting worsening patient access and service quality for HIV care, but there is insufficient evidence to suggest negative impacts on volume of HIV services. Facility respondents{\textquoteright} perceptions about access and quality may be overly pessimistic, or they may signal forthcoming impacts. Unrelated to transition, declining retention on ART in Uganda is a cause for concern.

}, issn = {1932-6203}, doi = {10.1371/journal.pone.0223426}, author = {Wilhelm, Jess Alan and Qiu, Mary and Paina, Ligia and Colantuoni, Elizabeth and Mukuru, Moses and Ssengooba, Freddie and Bennett, Sara} } @article {1278, title = {Large care gaps in primary care management of asthma: a longitudinal practice audit.}, journal = {BMJ Open}, volume = {9}, year = {2019}, month = {2019 01 29}, pages = {e022506}, abstract = {

OBJECTIVES: Care gaps in asthma may be highly prevalent but are poorly characterised. We sought to prospectively measure adherence to key evidence-based adult asthma practices in primary care, and predictors of these behaviours.

DESIGN: One-year prospective cohort study employing an electronic chart audit.

SETTING: Three family health teams (two academic, one community-based) in Ontario, Canada.

PARTICIPANTS: 884 patients (72.1\% female; 46.0{\textpm}17.5 years old) (4199 total visits; 4.8{\textpm}4.8 visits/patient) assigned to 23 physicians (65\% female; practising for 10.0{\textpm}8.6 years).

MAIN OUTCOME MEASURES: The primary outcome was the proportion of visits during which practitioners assessed asthma control according to symptom-based criteria. Secondary outcomes included the proportion of: patients who had asthma control assessed at least once; visits during which a controller medication was initiated or escalated; and patients who received a written asthma action plan. Behavioural predictors were established a priori and tested in a multivariable model.

RESULTS: Primary outcome: Providers assessed asthma control in 4.9\% of visits and 15.4\% of patients. Factors influencing assessment included clinic site (p=0.019) and presenting symptom, with providers assessing control more often during visits for asthma symptoms (35.0\%) or any respiratory symptoms (18.8\%) relative to other visits (1.6\%) (p<0.01).

SECONDARY OUTCOMES: Providers escalated controller therapy in 3.3\% of visits and 15.4\% of patients. Factors influencing escalation included clinic site, presenting symptom, and prior objective asthma diagnosis. Escalation occurred more frequently during visits for asthma symptoms (21.0\%) or any respiratory symptoms (11.9\%) relative to other visits (1.5\%) (p<0.01) and in patients without a prior objective asthma diagnosis (3.5\%) relative to those with one (1.3\%) (p=0.025). No asthma action plans were delivered.

CONCLUSIONS: Major gaps in evidence-based asthma practice exist in primary care. Targeted knowledge translation interventions are required to address these gaps, and can be tailored by leveraging the identified behavioural predictors.

TRIAL REGISTRATION NUMBER: NCT01070095; Pre-results.

}, issn = {2044-6055}, doi = {10.1136/bmjopen-2018-022506}, author = {Price, Courtney and Agarwal, Gina and Chan, David and Goel, Sanjeev and Kaplan, Alan G and Boulet, Louis-Philippe and Mamdani, Muhammad M and Straus, Sharon E and Lebovic, Gerald and Gupta, Samir} } @article {1275, title = {A national electronic health record for primary care.}, journal = {CMAJ}, volume = {191}, year = {2019}, month = {2019 01 14}, pages = {E28-E29}, keywords = {Canada, Delivery of Health Care, electronic health records, Health Policy, Primary Health Care}, issn = {1488-2329}, doi = {10.1503/cmaj.181647}, author = {Persaud, Nav} } @article {1228, title = {PatientExploreR: an extensible application for dynamic visualization of patient clinical history from Electronic Health Records in the OMOP Common Data Model.}, journal = {Bioinformatics}, year = {2019}, month = {2019 Jun 19}, abstract = {

MOTIVATION: Electronic Health Records (EHR) are quickly becoming omnipresent in healthcare, but interoperability issues and technical demands limit their use for biomedical and clinical research. Interactive and flexible software that interfaces directly with EHR data structured around a common data model could accelerate EHR-based research by making the data more accessible to researchers who lack computational expertise and/or domain knowledge.

RESULTS: We present PatientExploreR, an extensible application built on the R/Shiny framework that interfaces with a relational database of EHR data in the Observational Medical Outcomes Partnership Common Data Model (CDM) format. PatientExploreR produces patient-level interactive and dynamic reports and facilitates visualization of clinical data without any programming required. It allows researchers to easily construct and export patient cohorts from the EHR for analysis with other software. This application could enable easier exploration of patient-level data for physicians and researchers. PatientExploreR can incorporate EHR data from any institution that employs the CDM for users with approved access. The software code is free and open-source under the MIT license, enabling institutions to install it and users to expand and modify the application for their own purposes.

AVAILABILITY: PatientExploreR can be freely obtained from GitHub: https://github.com/BenGlicksberg/PatientExploreR. We provide instructions for how researchers with approved access to their institutional EHR can use this package. We also release an open sandbox server of synthesized patient data for users without EHR access to explore: http://patientexplorer.ucsf.edu.

SUPPLEMENTARY INFORMATION: Supplementary data are available at Bioinformatics online.

}, issn = {1367-4811}, doi = {10.1093/bioinformatics/btz409}, author = {Glicksberg, Benjamin S and Oskotsky, Boris and Thangaraj, Phyllis M and Giangreco, Nicholas and Badgeley, Marcus A and Johnson, Kipp W and Datta, Debajyoti and Rudrapatna, Vivek and Rappoport, Nadav and Shervey, Mark M and Miotto, Riccardo and Goldstein, Theodore C and Rutenberg, Eugenia and Frazier, Remi and Lee, Nelson and Israni, Sharat and Larsen, Rick and Percha, Bethany and Li, Li and Dudley, Joel T and Tatonetti, Nicholas P and Butte, Atul J} } @article {1216, title = {Transforming Two Decades of ePR Data to OMOP CDM for Clinical Research.}, journal = {Stud Health Technol Inform}, volume = {264}, year = {2019}, month = {2019 Aug 21}, pages = {233-237}, abstract = {

This paper presents the extract-transform-and-load (ETL) process from the Electronic Patient Records (ePR) at the Heart Institute (InCor) to the OMOP Common Data Model (CDM) format. We describe the initial database characterization, relational source mappings, selection filters, data transformations, and patient de-identification using the open-source OHDSI tools and SQL scripts. We evaluate the resulting InCor-CDM database by recreating the same patient cohort from a previous reference study (over the original data source) and comparing the cohorts{\textquoteright} descriptive statistics and inclusion reports. The results show that up to 91\% of the reference patients were retrieved by our method from the ePR through InCor-CDM, with AUC=0.938. The results indicate that the method we employed was able to produce a new database that was both consistent with the original data and in accordance with the OMOP CDM standard.

}, issn = {1879-8365}, doi = {10.3233/SHTI190218}, author = {Lima, Daniel M and Rodrigues-Jr, Jose F and Traina, Agma J M and Pires, Fabio A and Gutierrez, Marco A} } @article {1274, title = {Development and Evaluation of a New Security and Privacy Track in a Health Informatics Graduate Program: Multidisciplinary Collaboration in Education.}, journal = {JMIR Med Educ}, volume = {4}, year = {2018}, month = {2018 Dec 21}, pages = {e19}, abstract = {

BACKGROUND: The widespread application of technologies such as electronic health record systems, mobile health apps, and telemedicine platforms has made it easy for health care providers to collect relevant data and deliver health care regimens. While efficacious, these new technologies also pose serious security and privacy challenges.

OBJECTIVE: The training program described here aims to prepare well-informed health information security and privacy professionals using enhanced course materials and a variety of approaches.

METHODS: A new educational track has been built within a health informatics graduate program. Several existing graduate courses have been enhanced with new security and privacy modules. New labs and seminars have been created, and students are being encouraged to participate in research projects and obtain real-world experience from industry partners. Students in this track receive both theoretical education and hands-on practice. Evaluations have been performed on this new track by conducting multiple surveys on a sample of students.

RESULTS: We have succeeded in creating a new security track and developing a pertinent curriculum. The newly created security materials have been implemented in multiple courses. Our evaluation indicated that students (N=72) believed that receiving security and privacy training was important for health professionals, that the security content provided was interesting, and that the enhanced security and privacy training in this program was beneficial for their future careers.

CONCLUSIONS: The security and privacy education for health information professionals in this new security track has been significantly enhanced.

}, issn = {2369-3762}, doi = {10.2196/mededu.9081}, author = {Zhou, Leming and Parmanto, Bambang and Joshi, James} } @article {1203, title = {The {District} {Health} {Information} {System} ({DHIS}2): {A} literature review and meta-synthesis of its strengths and operational challenges based on the experiences of 11 countries}, journal = {Health Information Management: Journal of the Health Information Management Association of Australia}, year = {2018}, pages = {1833358318777713}, abstract = {BACKGROUND: Health information systems offer many potential benefits for healthcare, including financial benefits and improvements in the quality of patient care. The purpose of District Health Information Systems (DHIS) is to document data that are routinely collected in all public health facilities in a country using the system. OBJECTIVE: The aim of this study was to examine the strengths and operational challenges of DHIS2, with the goal of enabling decision makers in different countries to more accurately evaluate the outcomes of introducing DHIS2 into their particular country. METHOD: A review of the literature combined with the method of meta-synthesis was used to source information and interpret results relating to the strengths and operational challenges of DHIS2. Databases (Embase, PubMed, Scopus and Google Scholar) were searched for documents related to strengths and operational challenges of DHIS2, with no time limit up to 8 April 2017. The review and evaluation of selected studies was conducted in three stages: title, abstract and full text. Each of the selected studies was reviewed carefully and key concepts extracted. These key concepts were divided into two categories of strengths and operational challenges of DHIS2. Then, each category was grouped based on conceptual similarity to derive the main themes and sub-themes. Content analysis was used to analyse extracted data. RESULTS: Of 766 identified citations, 20 studies from 11 countries were included and analysed in this study. Identified strengths in the DHIS were represented in seven themes (with 21 categories): technical features of software, proper management of data, application flexibility, networking and increasing the satisfaction of stakeholders, development of data management, increasing access to information and economic benefits. Operational challenges were identified and captured in 11 themes (with 18 categories): funds; appropriate communication infrastructure; the need for the existence of appropriate data; political, cultural, social and structural infrastructure; manpower; senior managers; training; using academic potentials; definition and standardising the deployment processes; neglect of criteria and clinical guidelines in the use of the system; data security; stakeholder communications challenges and the necessity to establish a pilot system. CONCLUSION: This study highlighted specific strengths in the technical and functional aspects of DHIS2 and also drew attention to particular challenges and concerns. 
These results provide a sound evidence base for decision makers and policymakers to enable them to make more accurate decisions about whether or not to use the DHIS2 in the health system of their country.}, keywords = {content analysis, District Health Information System, health information system, health information technology, information technology, meta-synthesis, qualitative research}, issn = {1322-4913}, doi = {10.1177/1833358318777713}, author = {Dehnavieh, Reza and Haghdoost, AliAkbar and Khosravi, Ardeshir and Hoseinabadi, Fahime and Rahimi, Hamed and Poursheikhali, Atousa and Khajehpour, Nahid and Khajeh, Zahra and Mirshekari, Nadia and Hasani, Marziyeh and Radmerikhi, Samera and Haghighi, Hajar and Mehrolhassani, Mohammad Hossain and Kazemi, Elaheh and Aghamohamadi, Saeide} } @article {1282, title = {Effect of glycosylated hemoglobin on response to ranibizumab therapy in diabetic macular edema: real-world outcomes in 312 patients.}, journal = {Can J Ophthalmol}, volume = {53}, year = {2018}, month = {2018 08}, pages = {415-419}, abstract = {

OBJECTIVE: To investigate the effect of serum glycosylated hemoglobin (HbA1c) on the outcomes of ranibizumab therapy for diabetic macular edema (DME).

DESIGN: Retrospective cohort study.

PARTICIPANTS: Patients receiving ranibizumab injections for centre-involving DME in a National Health Service setting.

METHODS: The Moorfields OpenEyes database was used to study eyes with DME treated with ranibizumab from October 2013 to November 2015 at the Moorfields City Road, Ealing, Northwick Park, and St George{\textquoteright}s Hospital sites. Only eyes receiving a minimum of 3 injections and completing 12 months of follow-up were included. If both eyes received treatment, the first eye treated was analyzed. When both eyes received initial treatment simultaneously, random number tables were used to select the eye for analysis. HbA1c was tested at the initiation of ranibizumab treatment. Multivariate regression analysis was used to identify relationships between HbA1c and the outcome measures.

OUTCOMES: The primary outcome was change in visual acuity (VA) Early Treatment of Diabetic Retinopathy study (ETDRS) letters. The secondary outcomes were change in central subfield thickness (CSFT) and macular volume (MV), as well as number of injections in year 1.

RESULTS: Three hundred and twelve eyes of 312 patients were included in the analysis. HbA1c was not related to change in VA (p = 0.577), change in CSFT (p = 0.099), change in MV (p = 0.082), or number of injections in year 1 (p = 0.859).

CONCLUSIONS: HbA1c is not related to functional or anatomical outcomes at 1 year in DME treated with ranibizumab.

}, keywords = {Aged, Angiogenesis Inhibitors, Biomarkers, Diabetic Retinopathy, Female, Follow-Up Studies, Glycated Hemoglobin A, Humans, Intravitreal Injections, Macula Lutea, Macular Edema, Male, Middle Aged, Ranibizumab, Retrospective Studies, Tomography, Optical Coherence, Treatment Outcome, Vascular Endothelial Growth Factor A, Visual Acuity}, issn = {1715-3360}, doi = {10.1016/j.jcjo.2017.10.008}, author = {Shalchi, Zaid and Okada, Mali and Bruynseels, Alice and Palethorpe, David and Yusuf, Ammar and Hussain, Rohan and Herrspiegel, Christina and Scazzarriello, Antonio and Habib, Abubakar and Amin, Razia and Rajendram, Ranjan} } @article {1287, title = {Landmark detection in 2D bioimages for geometric morphometrics: a multi-resolution tree-based approach.}, journal = {Sci Rep}, volume = {8}, year = {2018}, month = {2018 01 11}, pages = {538}, abstract = {

The detection of anatomical landmarks in bioimages is a necessary but tedious step for geometric morphometrics studies in many research domains. We propose variants of a multi-resolution tree-based approach to speed-up the detection of landmarks in bioimages. We extensively evaluate our method variants on three different datasets (cephalometric, zebrafish, and drosophila images). We identify the key method parameters (notably the multi-resolution) and report results with respect to human ground truths and existing methods. Our method achieves recognition performances competitive with current existing approaches while being generic and fast. The algorithms are integrated in the open-source Cytomine software and we provide parameter configuration guidelines so that they can be easily exploited by end-users. Finally, datasets are readily available through a Cytomine server to foster future research.

}, keywords = {Algorithms, Animals, Body Weights and Measures, Drosophila, Humans, Image Processing, Computer-Assisted, Software, Zebrafish}, issn = {2045-2322}, doi = {10.1038/s41598-017-18993-5}, author = {Vandaele, R{\'e}my and Aceto, Jessica and Muller, Marc and P{\'e}ronnet, Fr{\'e}d{\'e}rique and Debat, Vincent and Wang, Ching-Wei and Huang, Cheng-Ta and Jodogne, S{\'e}bastien and Martinive, Philippe and Geurts, Pierre and Mar{\'e}e, Rapha{\"e}l} } @article {1235, title = {A Platform for Innovation and Standards Evaluation: a Case Study from the OpenMRS Open-Source Radiology Information System.}, journal = {J Digit Imaging}, volume = {31}, year = {2018}, month = {2018 06}, pages = {361-370}, abstract = {

Open-source development can provide a platform for innovation by seeking feedback from community members as well as providing tools and infrastructure to test new standards. Vendors of proprietary systems may delay adoption of new standards until there are sufficient incentives, such as legal mandates or financial rewards, to encourage or mandate adoption. Moreover, open-source systems in healthcare have been widely adopted in low- and middle-income countries and can be used to bridge gaps that exist in global health radiology. Since 2011, the authors, along with a community of open-source contributors, have worked on developing an open-source radiology information system (RIS) across two communities, OpenMRS and LibreHealth. The main purpose of the RIS is to implement core radiology workflows, on which others can build and test new radiology standards. This work has resulted in three major releases of the system, with current architectural changes driven by changing technology, development of new standards in health and imaging informatics, and changing user needs. At their core, both these communities are focused on building general-purpose EHR systems, but based on user contributions from the fringes, we have been able to create an innovative system that has been used by hospitals and clinics in four different countries. We provide an overview of the history of the LibreHealth RIS, the architecture of the system, and its standards integration, describe the challenges of developing an open-source product, and outline future directions. Our goal is to attract more participation and involvement to further develop the LibreHealth RIS into an Enterprise Imaging System that can be used in other clinical imaging domains, including pathology and dermatology.

}, keywords = {Diagnostic Imaging, Humans, Radiology Information Systems, Software, Systems Integration, Workflow}, issn = {1618-727X}, doi = {10.1007/s10278-018-0088-5}, author = {Gichoya, Judy W and Kohli, Marc and Ivange, Larry and Schmidt, Teri S and Purkayastha, Saptarshi} } @article {1224, title = {Towards Implementation of OMOP in a German University Hospital Consortium.}, journal = {Appl Clin Inform}, volume = {9}, year = {2018}, month = {2018 01}, pages = {54-61}, abstract = {

BACKGROUND: In 2015, the German Federal Ministry of Education and Research initiated a large data integration and data sharing research initiative to improve the reuse of data from patient care and translational research. The Observational Medical Outcomes Partnership (OMOP) common data model and the Observational Health Data Sciences and Informatics (OHDSI) tools could be used as a core element in this initiative for harmonizing the terminologies used as well as facilitating the federation of research analyses across institutions.

OBJECTIVE: To realize an OMOP/OHDSI-based pilot implementation within a consortium of eight German university hospitals, evaluate the applicability to support data harmonization and sharing among them, and identify potential enhancement requirements.

METHODS: The vocabularies and terminological mapping required for importing the fact data were prepared, and the process for importing the data from the source files was designed. Each of the eight German university hospitals was provided with a virtual machine preconfigured with the OMOP database, the OHDSI tools, and the jobs to import the data and conduct the analysis. Finally, a federated/distributed query was executed to test the approach.

RESULTS: While the mapping of ICD-10 German Modification succeeded with a rate of 98.8\% of all terms for diagnoses, the procedures could not be mapped and hence an extension to the OMOP standard terminologies had to be made. Overall, the data of 3 million inpatients with approximately 26 million conditions, 21 million procedures, and 23 million observations have been imported. A federated query to identify a cohort of colorectal cancer patients was successfully executed and yielded 16,701 patient cases visualized in a Sunburst plot.

CONCLUSION: OMOP/OHDSI is a viable open source solution for data integration in a German research consortium. Once the terminology problems are solved, researchers can build on an active community for further development.

}, keywords = {Cooperative Behavior, Germany, Health Plan Implementation, Hospitals, University, Humans, Outcome Assessment (Health Care), Surveys and Questionnaires, Vocabulary}, issn = {1869-0327}, doi = {10.1055/s-0037-1617452}, author = {Maier, C and Lang, L and Storf, H and Vormstein, P and Bieber, R and Bernarding, J and Herrmann, T and Haverkamp, C and Horki, P and Laufer, J and Berger, F and H{\"o}ning, G and Fritsch, H W and Sch{\"u}ttler, J and Ganslandt, T and Prokosch, H U and Sedlmayr, M} } @article {1231, title = {Home Behavior Monitoring Module in OpenEMR: Use of home sensors as Patient-Generated Data (PGD) for elderly care.}, journal = {AMIA Annu Symp Proc}, volume = {2017}, year = {2017}, month = {2017}, pages = {2279-2283}, issn = {1942-597X}, author = {Casanova-Perez, Regina A and Padilla-Huamantinco, Pierre G and De Freitas-Vidal, Catharine I and Choi, Yong K} } @article {1285, title = {Long-Term Outcomes of Aflibercept Treatment for Neovascular Age-Related Macular Degeneration in a Clinical Setting.}, journal = {Am J Ophthalmol}, volume = {174}, year = {2017}, month = {2017 Feb}, pages = {160-168}, abstract = {

PURPOSE: To report 2-year treatment outcomes with intravitreal aflibercept for neovascular age-related macular degeneration (nAMD) in routine clinical practice.

DESIGN: Retrospective, nonrandomized, interventional case series.

METHODS: Retrospective analysis of electronic medical record (EMR) notes (OpenEyes) and paper case notes, and review of spectral-domain optical coherence tomography (SDOCT) imaging, of consecutively treated eyes of patients with previously untreated nAMD. Patients were commenced on aflibercept injections in 1 or both eyes from October 1, 2013 to December 31, 2013. Data including age, sex, visual acuity (VA) measured on Early Treatment Diabetic Retinopathy Study charts, injection episodes, and complications were recorded. Additionally, SDOCT data, including the presence or absence of macular fluid and automated central subfield macular thickness (CSMT) at years 1 and 2, were recorded.

RESULTS: Of the 109 eyes of 102 patients treated, data from 94 eyes of 88 patients were available at 2-year follow-up (86\% of patients). In the analysis of 2-year outcomes, there were 58 women (65.9\%); the mean ({\textpm} standard deviation) age was 77.5 {\textpm} 8 years. Over the 2 years, these eyes received a median of 12 (mean, 11.4 {\textpm} 4) injections at a median of 100 (mean, 99.3 {\textpm} 5.3) weeks of follow-up. The mean VA changed from 55.9 {\textpm} 15 letters at baseline to 61.3 {\textpm} 16.9 letters (VA gain 5.4 letters) at 1 year and to 61 {\textpm} 17.1 letters (VA gain 5.1 {\textpm} 14.9 letters) at 2 years. The reduction in CSMT was 79~μm with absence of macular fluid in 72.7\% of the 88 eyes with SDOCT data available at 2-year follow-up.

CONCLUSIONS: The VA and SDOCT results compare favorably with outcomes seen in randomized controlled trials. The results suggest that good long-term outcomes can be achieved using aflibercept for nAMD in clinical settings.

}, keywords = {Aged, Dose-Response Relationship, Drug, Female, Fluorescein Angiography, Follow-Up Studies, Fundus Oculi, Humans, Intravitreal Injections, Macula Lutea, Macular Degeneration, Male, Receptors, Vascular Endothelial Growth Factor, Recombinant Fusion Proteins, Retinal Neovascularization, Retrospective Studies, Time Factors, Tomography, Optical Coherence, Treatment Outcome, Visual Acuity}, issn = {1879-1891}, doi = {10.1016/j.ajo.2016.09.038}, author = {Eleftheriadou, Maria and Vazquez-Alfageme, Clara and Citu, Cristina Maria and Crosby-Nwaobi, Roxanne and Sivaprasad, Sobha and Hykin, Philip and Hamilton, Robin D and Patel, Praveen J} } @article {1174, title = {A national standards-based assessment on functionality of electronic medical records systems used in {Kenyan} public-sector health facilities}, journal = {International Journal of Medical Informatics}, volume = {97}, year = {2017}, pages = {68{\textendash}75}, abstract = {BACKGROUND: Variations in the functionality, content and form of electronic medical record systems (EMRs) challenge national roll-out of these systems as part of a national strategy to monitor HIV response. To enforce the EMRs minimum requirements for delivery of quality HIV services, the Kenya Ministry of Health (MoH) developed EMRs standards and guidelines. The standards guided the recommendation of EMRs that met a preset threshold for national roll-out. METHODS: Using a standards-based checklist, six review teams formed by the MoH EMRs Technical Working Group rated a total of 17 unique EMRs in 28 health facilities selected by individual owners for their optimal EMR implementation. EMRs with an aggregate score of >=60\% against checklist criteria were identified by the MoH as suitable for upgrading and rollout to Kenyan public health facilities. RESULTS: In Kenya, existing EMRs scored highly in health information and reporting (mean score=71.8\%), followed by security, system features, core clinical information, and order entry criteria (mean score=58.1\%-55.9\%), and lowest against clinical decision support (mean score=17.6\%) and interoperability criteria (mean score=14.3\%). Four EMRs met the 60.0\% threshold: OpenMRS, IQ-Care, C-PAD and Funsoft. On the basis of the review, the MoH provided EMRs upgrade plans to owners of all the 17 systems reviewed. CONCLUSION: The standards-based review in Kenya represents an effort to determine the level of conformance to the EMRs standards and prioritize EMRs for enhancement and rollout. The results support concentrated use of resources towards development of the four recommended EMRs. Further review should be conducted to determine the effect of the EMR-specific upgrade plans on the other 13 EMRs that participated in the review exercise.}, keywords = {Checklist, EMRs, Review, Standards}, issn = {1872-8243}, doi = {10.1016/j.ijmedinf.2016.09.013}, author = {Kang{\textquoteright}a, Samuel and Puttkammer, Nancy and Wanyee, Steven and Kimanga, Davies and Madrano, Jason and Muthee, Veronica and Odawo, Patrick and Sharma, Anjali and Oluoch, Tom and Robinson, Katherine and Kwach, James and Lober, William B.} } @article {1192, title = {{OpenICE} medical device interoperability platform overview and requirement analysis}, journal = {Biomedizinische Technik. 
Biomedical Engineering}, year = {2017}, abstract = {We give an overview of OpenICE, an open source implementation of the ASTM standard F2761 for the Integrated Clinical Environment (ICE) that leverages medical device interoperability, together with an analysis of the clinical and non-functional requirements and community process that inspired its design.}, keywords = {Interoperability, medical applications, patient safety, platform, requirements engineering}, issn = {1862-278X}, doi = {10.1515/bmt-2017-0040}, author = {Arney, David and Plourde, Jeffrey and Goldman, Julian M.} } @article {1202, title = {Open-source mobile digital platform for clinical trial data collection in low-resource settings}, journal = {BMJ innovations}, volume = {3}, year = {2017}, pages = {26{\textendash}31}, abstract = {BACKGROUND: Governments, universities and pan-African research networks are building durable infrastructure and capabilities for biomedical research in Africa. This offers the opportunity to adopt from the outset innovative approaches and technologies that would be challenging to retrofit into fully established research infrastructures such as those regularly found in high-income countries. In this context we piloted the use of a novel mobile digital health platform, designed specifically for low-resource environments, to support high-quality data collection in a clinical research study. OBJECTIVE: Our primary aim was to assess the feasibility of using a mobile digital platform for clinical trial data collection in a low-resource setting. Secondarily, we sought to explore the potential benefits of such an approach. METHODS: The investigative site was a research institute in Nairobi, Kenya. We integrated an open-source platform for mobile data collection commonly used in the developing world with an open-source, standard platform for electronic data capture in clinical trials. The integration was developed using common data standards (Clinical Data Interchange Standards Consortium (CDISC) Operational Data Model), maximising the potential to extend the approach to other platforms. The system was deployed in a pharmacokinetic study involving healthy human volunteers. RESULTS: The electronic data collection platform successfully supported conduct of the study. Multidisciplinary users reported high levels of satisfaction with the mobile application and highlighted substantial advantages when compared with traditional paper record systems. The new system also demonstrated a potential for expediting data quality review. DISCUSSION AND CONCLUSIONS: This pilot study demonstrated the feasibility of using a mobile digital platform for clinical research data collection in low-resource settings. Sustainable scientific capabilities and infrastructure are essential to attract and support clinical research studies. 
Since many research structures in Africa are being developed anew, stakeholders should consider implementing innovative technologies and approaches.}, keywords = {clinical research, eSource, Global Health, mHealth, Reverse Innovations}, issn = {2055-642X}, doi = {10.1136/bmjinnov-2016-000164}, author = {van Dam, Joris and Omondi Onyango, Kevin and Midamba, Brian and Groosman, Nele and Hooper, Norman and Spector, Jonathan and Pillai, Goonaseelan Colin and Ogutu, Bernhards} } @article {1167, title = {{ConoSurf}: {Open}-source 3D scanning system based on a conoscopic holography device for acquiring surgical surfaces}, journal = {The international journal of medical robotics + computer assisted surgery: MRCAS}, year = {2016}, abstract = {BACKGROUND: A difficulty in computer-assisted interventions is acquiring the patient{\textquoteright}s anatomy intraoperatively. Standard modalities have several limitations: low image quality (ultrasound), radiation exposure (computed tomography) or high costs (magnetic resonance imaging). An alternative approach uses a tracked pointer; however, the pointer causes tissue deformation and requires sterilizing. Recent proposals, utilizing a tracked conoscopic holography device, have shown promising results without the previously mentioned drawbacks. METHODS: We have developed an open-source software system that enables real-time surface scanning using a conoscopic holography device and a wide variety of tracking systems, integrated into pre-existing and well-supported software solutions. RESULTS: The mean target registration error of point measurements was 1.46~mm. For a quick guidance scan, surface reconstruction improved the surface registration error compared with point-set registration. CONCLUSIONS: We have presented a system enabling real-time surface scanning using a tracked conoscopic holography device. Results show that it can be useful for acquiring the patient{\textquoteright}s anatomy during surgery.}, issn = {1478-596X}, doi = {10.1002/rcs.1788}, author = {Brudfors, Mikael and Garc{\'\i}a-V{\'a}zquez, Ver{\'o}nica and Ses{\'e}-Lucio, Bego{\~n}a and Marinetto, Eugenio and Desco, Manuel and Pascau, Javier} } @article {1150, title = {Free and open-source automated 3-D microscope.}, journal = {J Microsc}, year = {2016}, month = {2016 Aug 29}, abstract = {

Open-source technology has not only facilitated the expansion of the greater research community, but, by lowering costs, has also encouraged innovation and customizable design. The field of automated microscopy has continued to pose an accessibility challenge due to the expense of inflexible, noninterchangeable stages. This paper presents a low-cost, open-source microscope 3-D stage. A RepRap 3-D printer was converted into an optical microscope equipped with a customized, 3-D printed holder for a USB microscope. Precision measurements were determined to have an average error of 10 μm at the maximum speed and 27 μm at the minimum recorded speed. Accuracy tests yielded an error of 0.15\%. The machine is a true 3-D stage and thus able to operate with USB microscopes or conventional desktop microscopes. It is larger than all commercial alternatives, and is thus capable of high-depth images over unprecedented areas and complex geometries. The repeatability is below that of 2-D microscope stages, but testing shows that it is adequate for the majority of scientific applications. The open-source microscope stage costs only 3-9\% as much as the closest proprietary commercial stages. This extreme affordability vastly improves accessibility for 3-D microscopy throughout the world.

}, issn = {1365-2818}, doi = {10.1111/jmi.12433}, author = {Wijnen, Bas and Petersen, Emily E and Hunt, Emily J and Pearce, Joshua M} } @article {1135, title = {Improving documentation of clinical care within a clinical information network: an essential initial step in efforts to understand and improve care in Kenyan hospitals.}, journal = {BMJ Glob Health}, volume = {1}, year = {2016}, month = {2016 May 24}, pages = {e000028}, abstract = {

In many low-income countries, health information systems are poorly equipped to provide detailed information on hospital care and outcomes. Information is thus rarely used to support practice improvement. We describe efforts to tackle this challenge and to foster learning concerning the collection and use of information that could improve hospital services in Kenya. We are developing a Clinical Information Network, a collaboration spanning 14 hospitals, policy makers and researchers, with the goal of improving information available on the quality of inpatient paediatric care across common childhood illnesses in Kenya. Standardised data from hospitals{\textquoteright} paediatric wards are collected using non-commercial and open source tools. We have implemented procedures for promoting data quality which are performed prior to a process of semi-automated analysis and routine report generation for hospitals in the network. In the first phase of the Clinical Information Network, we collected data on over 65 000 admission episodes. Despite clinicians{\textquoteright} initial unfamiliarity with routine performance reporting, we found that, as an initial focus, both engaging with each hospital and providing them with information helped improve the quality of data and therefore reports. The process has involved mutual learning and building of trust in the data and should provide the basis for collaborative efforts to improve care, to understand patient outcomes, and to evaluate interventions through shared learning. We have found that hospitals are willing to support the development of a clinically focused but geographically dispersed Clinical Information Network in a low-income setting. Such networks show considerable promise as platforms for collaborative efforts to improve care, to provide better information for decision making, and to enable locally relevant research.

}, issn = {2059-7908}, doi = {10.1136/bmjgh-2016-000028}, author = {Tuti, Timothy and Bitok, Michael and Malla, Lucas and Paton, Chris and Muinga, Naomi and Gathara, David and Gachau, Susan and Mbevi, George and Nyachiro, Wycliffe and Ogero, Morris and Julius, Thomas and Irimu, Grace and English, Mike} } @article {1131, title = {Increasing the impact of medical image computing using community-based open-access hackathons: The NA-MIC and 3D Slicer experience.}, journal = {Med Image Anal}, year = {2016}, month = {2016 Jul 7}, abstract = {

The National Alliance for Medical Image Computing (NA-MIC) was launched in 2004 with the goal of investigating and developing an open source software infrastructure for the extraction of information and knowledge from medical images using computational methods. Several leading research and engineering groups participated in this effort, which was funded by the US National Institutes of Health through a variety of infrastructure grants. This effort transformed 3D Slicer from an internal, Boston-based, academic research software application into a professionally maintained, robust, open source platform with international leadership and developer and user communities. Critical improvements to the widely used underlying open source libraries and tools (VTK, ITK, CMake, CDash, DCMTK) were an additional consequence of this effort. This project has contributed to close to a thousand peer-reviewed publications and a growing portfolio of US and internationally funded efforts expanding the use of these tools in new medical computing applications every year. In this editorial, we discuss what we believe are gaps in the way medical image computing is pursued today; how a well-executed research platform can enable discovery, innovation and reproducible science ("Open Science"); and how our quest to build such a software platform has evolved into a productive and rewarding social engineering exercise in building an open-access community with a shared vision.

}, issn = {1361-8423}, doi = {10.1016/j.media.2016.06.035}, author = {Kapur, Tina and Pieper, Steve and Fedorov, Andriy and Fillion-Robin, J-C and Halle, Michael and O{\textquoteright}Donnell, Lauren and Lasso, Andras and Ungi, Tamas and Pinter, Csaba and Finet, Julien and Pujol, Sonia and Jagadeesan, Jayender and Tokuda, Junichi and Norton, Isaiah and Estepar, Raul San Jose and Gering, David and Aerts, Hugo J W L and Jakab, Marianna and Hata, Nobuhiko and Ibanez, Luiz and Blezek, Daniel and Miller, Jim and Aylward, Stephen and Grimson, W Eric L and Fichtinger, Gabor and Wells, William M and Lorensen, William E and Schroeder, Will and Kikinis, Ron} } @article {1162, title = {Jenkins-{CI}, an {Open}-{Source} {Continuous} {Integration} {System}, as a {Scientific} {Data} and {Image}-{Processing} {Platform}}, journal = {Journal of Biomolecular Screening}, year = {2016}, abstract = {High-throughput screening generates large volumes of heterogeneous data that require a diverse set of computational tools for management, processing, and analysis. Building integrated, scalable, and robust computational workflows for such applications is challenging but highly valuable. Scientific data integration and pipelining facilitate standardized data processing, collaboration, and reuse of best practices. We describe how Jenkins-CI, an "off-the-shelf," open-source, continuous integration system, is used to build pipelines for processing images and associated data from high-content screening (HCS). Jenkins-CI provides numerous plugins for standard compute tasks, and its design allows the quick integration of external scientific applications. Using Jenkins-CI, we integrated CellProfiler, an open-source image-processing platform, with various HCS utilities and a high-performance Linux cluster. The platform is web-accessible, facilitates access and sharing of high-performance compute resources, and automates previously cumbersome data and image-processing tasks. Imaging pipelines developed using the desktop CellProfiler client can be managed and shared through a centralized Jenkins-CI repository. Pipelines and managed data are annotated to facilitate collaboration and reuse. Limitations with Jenkins-CI (primarily around the user interface) were addressed through the selection of helper plugins from the Jenkins-CI community.}, keywords = {CellProfiler, continuous integration, high-content screening, high-performance computing}, issn = {1552-454X}, doi = {10.1177/1087057116679993}, author = {Moutsatsos, Ioannis K. and Hossain, Imtiaz and Agarinis, Claudia and Harbinski, Fred and Abraham, Yann and Dobler, Luc and Zhang, Xian and Wilson, Christopher J. and Jenkins, Jeremy L. and Holway, Nicholas and Tallarico, John and Parker, Christian N.} } @article {1168, title = {Methods for {Specifying} {Scientific} {Data} {Standards} and {Modeling} {Relationships} with {Applications} to {Neuroscience}}, journal = {Frontiers in Neuroinformatics}, volume = {10}, year = {2016}, pages = {48}, abstract = {Neuroscience continues to experience tremendous growth in data, in terms of the volume and variety of data, the velocity at which data is acquired, and in turn the veracity of data. These challenges are a serious impediment to sharing of data, analyses, and tools within and across labs. Here, we introduce BRAINformat, a novel data standardization framework for the design and management of scientific data formats. 
The BRAINformat library defines application-independent design concepts and modules that together create a general framework for standardization of scientific data. We describe the formal specification of scientific data standards, which facilitates sharing and verification of data and formats. We introduce the concept of Managed Objects, enabling semantic components of data formats to be specified as self-contained units, supporting modular and reusable design of data format components and file storage. We also introduce the novel concept of Relationship Attributes for modeling and use of semantic relationships between data objects. Based on these concepts, we demonstrate the application of our framework to design and implement a standard format for electrophysiology data and show how data standardization and relationship-modeling facilitate data analysis and sharing. The format uses HDF5, enabling portable, scalable, and self-describing data storage and integration with modern high-performance computing for data-driven discovery. The BRAINformat library is open source and easy to use, provides detailed user and developer documentation, and is freely available at: https://bitbucket.org/oruebel/brainformat.}, keywords = {data format specification, electrophysiology, neuroscience, relationship modeling}, doi = {10.3389/fninf.2016.00048}, author = {R{\"u}bel, Oliver and Dougherty, Max and Prabhat and Denes, Peter and Conant, David and Chang, Edward F. and Bouchard, Kristofer} } @article {1170, title = {Open {Source} {Drug} {Discovery}: {Highly} {Potent} {Antimalarial} {Compounds} {Derived} from the {Tres} {Cantos} {Arylpyrroles}}, journal = {ACS central science}, volume = {2}, year = {2016}, pages = {687{\textendash}701}, abstract = {The development of new antimalarial compounds remains a pivotal part of the strategy for malaria elimination. Recent large-scale phenotypic screens have provided a wealth of potential starting points for hit-to-lead campaigns. One such public set is explored, employing an open source research mechanism in which all data and ideas were shared in real time, anyone was able to participate, and patents were not sought. One chemical subseries was found to exhibit oral activity but contained a labile ester that could not be replaced without loss of activity, and the original hit exhibited remarkable sensitivity to minor structural change. A second subseries displayed high potency, including activity within gametocyte and liver stage assays, but at the cost of low solubility. As an open source research project, unexplored avenues are clearly identified and may be explored further by the community; new findings may be cumulatively added to the present work.}, issn = {2374-7943}, doi = {10.1021/acscentsci.6b00086}, author = {Williamson, Alice E. and Ylioja, Paul M. and Robertson, Murray N. and Antonova-Koch, Yevgeniya and Avery, Vicky and Baell, Jonathan B. and Batchu, Harikrishna and Batra, Sanjay and Burrows, Jeremy N. and Bhattacharyya, Soumya and Calderon, Felix and Charman, Susan A. and Clark, Julie and Crespo, Benigno and Dean, Matin and Debbert, Stefan L. and Delves, Michael and Dennis, Adelaide S. M. and Deroose, Frederik and Duffy, Sandra and Fletcher, Sabine and Giaever, Guri and Hallyburton, Irene and Gamo, Francisco-Javier and Gebbia, Marinella and Guy, R. Kiplin and Hungerford, Zoe and Kirk, Kiaran and Lafuente-Monasterio, Maria J. and Lee, Anna and Meister, Stephan and Nislow, Corey and Overington, John P. 
and Papadatos, George and Patiny, Luc and Pham, James and Ralph, Stuart A. and Ruecker, Andrea and Ryan, Eileen and Southan, Christopher and Srivastava, Kumkum and Swain, Chris and Tarnowski, Matthew J. and Thomson, Patrick and Turner, Peter and Wallace, Iain M. and Wells, Timothy N. C. and White, Karen and White, Laura and Willis, Paul and Winzeler, Elizabeth A. and Wittlin, Sergio and Todd, Matthew H.} } @article {1121, title = {Open Source Drug Discovery with the Malaria Box Compound Collection for Neglected Diseases and Beyond.}, journal = {PLoS Pathog}, volume = {12}, year = {2016}, month = {2016 Jul}, pages = {e1005763}, abstract = {

A major cause of the paucity of new starting points for drug discovery is the lack of interaction between academia and industry. Much of the global resource in biology is present in universities, whereas the focus of medicinal chemistry is still largely within industry. Open source drug discovery, with sharing of information, is clearly a first step towards overcoming this gap. But the interface could especially be bridged through a scale-up of open sharing of physical compounds, which would accelerate the finding of new starting points for drug discovery. The Medicines for Malaria Venture Malaria Box is a collection of over 400 compounds representing families of structures identified in phenotypic screens of pharmaceutical and academic libraries against the Plasmodium falciparum malaria parasite. The set has now been distributed to almost 200 research groups globally in the last two years, with the only stipulation that information from the screens is deposited in the public domain. This paper reports for the first time on 236 screens that have been carried out against the Malaria Box and compares these results with 55 assays that were previously published, in a format that allows a meta-analysis of the combined dataset. The combined biochemical and cellular assays presented here suggest mechanisms of action for 135 (34\%) of the compounds active in killing multiple life-cycle stages of the malaria parasite, including asexual blood, liver, gametocyte, gametes and insect ookinete stages. In addition, many compounds demonstrated activity against other pathogens, showing hits in assays with 16 protozoa, 7 helminths, 9 bacterial and mycobacterial species, the dengue fever mosquito vector, and the NCI60 human cancer cell line panel of 60 human tumor cell lines. Toxicological, pharmacokinetic and metabolic properties were collected on all the compounds, assisting in the selection of the most promising candidates for murine proof-of-concept experiments and medicinal chemistry programs. The data for all of these assays are presented and analyzed to show how outstanding leads for many indications can be selected. These results reveal the immense potential for translating the dispersed expertise in biological assays involving human pathogens into drug discovery starting points, by providing open access to new families of molecules, and emphasize how a small additional investment made to help acquire and distribute compounds, and sharing the data, can catalyze drug discovery for dozens of different indications. Another lesson is that when multiple screens from different groups are run on the same library, results can be integrated quickly to select the most valuable starting points for subsequent medicinal chemistry efforts.

}, issn = {1553-7374}, doi = {10.1371/journal.ppat.1005763}, author = {Van Voorhis, Wesley C and Adams, John H and Adelfio, Roberto and Ahyong, Vida and Akabas, Myles H and Alano, Pietro and Alday, Aintzane and Alem{\'a}n Resto, Yesmalie and Alsibaee, Aishah and Alzualde, Ainhoa and Andrews, Katherine T and Avery, Simon V and Avery, Vicky M and Ayong, Lawrence and Baker, Mark and Baker, Stephen and Ben Mamoun, Choukri and Bhatia, Sangeeta and Bickle, Quentin and Bounaadja, Lotfi and Bowling, Tana and Bosch, J{\"u}rgen and Boucher, Lauren E and Boyom, Fabrice F and Brea, Jose and Brennan, Marian and Burton, Audrey and Caffrey, Conor R and Camarda, Grazia and Carrasquilla, Manuela and Carter, Dee and Belen Cassera, Maria and Chih-Chien Cheng, Ken and Chindaudomsate, Worathad and Chubb, Anthony and Colon, Beatrice L and Col{\'o}n-L{\'o}pez, Daisy D and Corbett, Yolanda and Crowther, Gregory J and Cowan, Noemi and D{\textquoteright}Alessandro, Sarah and Le Dang, Na and Delves, Michael and DeRisi, Joseph L and Du, Alan Y and Duffy, Sandra and Abd El-Salam El-Sayed, Shimaa and Ferdig, Michael T and Fern{\'a}ndez Robledo, Jos{\'e} A and Fidock, David A and Florent, Isabelle and Fokou, Patrick V T and Galstian, Ani and Gamo, Francisco Javier and Gokool, Suzanne and Gold, Ben and Golub, Todd and Goldgof, Gregory M and Guha, Rajarshi and Guiguemde, W Armand and Gural, Nil and Guy, R Kiplin and Hansen, Michael A E and Hanson, Kirsten K and Hemphill, Andrew and Hooft van Huijsduijnen, Rob and Horii, Takaaki and Horrocks, Paul and Hughes, Tyler B and Huston, Christopher and Igarashi, Ikuo and Ingram-Sieber, Katrin and Itoe, Maurice A and Jadhav, Ajit and Naranuntarat Jensen, Amornrat and Jensen, Laran T and Jiang, Rays H Y and Kaiser, Annette and Keiser, Jennifer and Ketas, Thomas and Kicka, Sebastien and Kim, Sunyoung and Kirk, Kiaran and Kumar, Vidya P and Kyle, Dennis E and Lafuente, Maria Jose and Landfear, Scott and Lee, Nathan and Lee, Sukjun and Lehane, Adele M and Li, Fengwu and Little, David and Liu, Liqiong and Llin{\'a}s, Manuel and Loza, Maria I and Lubar, Aristea and Lucantoni, Leonardo and Lucet, Isabelle and Maes, Louis and Mancama, Dalu and Mansour, Nuha R and March, Sandra and McGowan, Sheena and Medina Vera, Iset and Meister, Stephan and Mercer, Luke and Mestres, Jordi and Mfopa, Alvine N and Misra, Raj N and Moon, Seunghyun and Moore, John P and Morais Rodrigues da Costa, Francielly and M{\"u}ller, Joachim and Muriana, Arantza and Nakazawa Hewitt, Stephen and Nare, Bakela and Nathan, Carl and Narraidoo, Nathalie and Nawaratna, Sujeevi and Ojo, Kayode K and Ortiz, Diana and Panic, Gordana and Papadatos, George and Parapini, Silvia and Patra, Kailash and Pham, Ngoc and Prats, Sarah and Plouffe, David M and Poulsen, Sally-Ann and Pradhan, Anupam and Quevedo, Celia and Quinn, Ronald J and Rice, Christopher A and Abdo Rizk, Mohamed and Ruecker, Andrea and St Onge, Robert and Salgado Ferreira, Rafaela and Samra, Jasmeet and Robinett, Natalie G and Schlecht, Ulrich and Schmitt, Marjorie and Silva Villela, Filipe and Silvestrini, Francesco and Sinden, Robert and Smith, Dennis A and Soldati, Thierry and Spitzm{\"u}ller, Andreas and Stamm, Serge Maximilian and Sullivan, David J and Sullivan, William and Suresh, Sundari and Suzuki, Brian M and Suzuki, Yo and Swamidass, S Joshua and Taramelli, Donatella and Tchokouaha, Lauve R Y and Theron, Anjo and Thomas, David and Tonissen, Kathryn F and Townson, Simon and Tripathi, Abhai K and Trofimov, Valentin and Udenze, Kenneth O and Ullah, Imran and 
Vallieres, Cindy and Vigil, Edgar and Vinetz, Joseph M and Voong Vinh, Phat and Vu, Hoan and Watanabe, Nao-Aki and Weatherby, Kate and White, Pamela M and Wilks, Andrew F and Winzeler, Elizabeth A and Wojcik, Edward and Wree, Melanie and Wu, Wesley and Yokoyama, Naoaki and Zollo, Paul H A and Abla, Nada and Blasco, Benjamin and Burrows, Jeremy and Laleu, Beno{\^\i}t and Leroy, Didier and Spangenberg, Thomas and Wells, Timothy and Willis, Paul A} } @article {1138, title = {Open source posturography.}, journal = {Acta Otolaryngol}, year = {2016}, month = {2016 Jul 6}, pages = {1-5}, abstract = {

CONCLUSION: The proposed validation goal of an intra-class correlation coefficient of 0.9 was reached with the results of this study. Based on these results, we consider the developed software (RombergLab) to be validated balance assessment software. The reliability of this software depends on the technical specifications of the force platform used.

OBJECTIVE: To develop and validate posturography software and to share its source code under open source terms.

METHODS: Prospective, non-randomized validation study: 20 consecutive adults underwent two balance assessment tests. Six-condition posturography was performed using clinically approved software with its force platform, and the same conditions were measured using the newly developed open source software with a low-cost force platform. The intra-class correlation coefficient of the sway area, derived from the center-of-pressure variations on both devices across the six conditions, was the main variable used for validation.

RESULTS: Excellent concordance between RombergLab and the clinically approved force platform was obtained (intra-class correlation coefficient = 0.94). A Bland-Altman concordance plot was also obtained. The source code used to develop RombergLab was published under open source terms.
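As an illustration of the validation metric, the sketch below computes an intra-class correlation coefficient (ICC(2,1): two-way random effects, absolute agreement, single measurement) on made-up sway areas from two platforms; it is not RombergLab code, and the data are invented.

```python
# Illustrative ICC(2,1) computation for device agreement; data invented.
import numpy as np

def icc_2_1(data: np.ndarray) -> float:
    """data: n_subjects x k_raters matrix of measurements."""
    n, k = data.shape
    grand = data.mean()
    row_means = data.mean(axis=1)
    col_means = data.mean(axis=0)
    # Mean squares from a two-way ANOVA decomposition.
    msr = k * np.sum((row_means - grand) ** 2) / (n - 1)   # subjects
    msc = n * np.sum((col_means - grand) ** 2) / (k - 1)   # devices
    sse = np.sum((data - row_means[:, None] - col_means[None, :] + grand) ** 2)
    mse = sse / ((n - 1) * (k - 1))
    return (msr - mse) / (msr + (k - 1) * mse + k * (msc - mse) / n)

# Hypothetical sway areas (cm^2) on the reference and the low-cost platform.
sway = np.array([[3.1, 3.3], [5.0, 4.7], [2.2, 2.4], [7.9, 7.5], [4.4, 4.6]])
print(f"ICC(2,1) = {icc_2_1(sway):.2f}")
```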

}, issn = {1651-2251}, doi = {10.1080/00016489.2016.1204665}, author = {Rey-Martinez, Jorge and P{\'e}rez-Fern{\'a}ndez, Nicol{\'a}s} } @article {1124, title = {Open-source LIMS in Vietnam: The path toward sustainability and host country ownership.}, journal = {Int J Med Inform}, volume = {93}, year = {2016}, month = {2016 Sep}, pages = {92-102}, abstract = {

OBJECTIVE: The objectives of this case report are as follows: to describe the process of establishing a national laboratory information management system (LIMS) program for clinical and public health laboratories in Vietnam; to evaluate the outcomes and lessons learned; and to present a model for sustainability based on the program outcomes that could be applied to diverse laboratory programs.

METHODS: This case report comprises a review of program documentation and records, including planning and budgetary records of the donor, monthly reports from the implementer, direct observation, and ad-hoc field reports from technical advisors and governmental agencies. Additional data on program efficacy and user acceptance were collected from routine monitoring of laboratory policies and operational practices.

RESULTS: LIMS software was implemented at 38 hospital, public health and HIV testing laboratories in Vietnam. This LIMS was accepted by users and program managers as a useful tool to support laboratory processes. Implementation cost per laboratory and average duration of deployment decreased over time, and project stakeholders initiated transition of financing (from the donor to local institutions) and of system maintenance functions (from the implementer to governmental and site-level staff). Collaboration between the implementer in Vietnam and the global LIMS user community was strongly established, and knowledge was successfully transferred to staff within Vietnam.

CONCLUSION: Implementing an open-source LIMS with local development and support was a feasible approach to establishing a sustainable laboratory informatics program that met the needs of health laboratories in Vietnam. Further effort to institutionalize IT support capacity within key government agencies is ongoing.

}, issn = {1872-8243}, doi = {10.1016/j.ijmedinf.2016.06.010}, author = {Landgraf, Kenneth M and Kakkar, Reshma and Meigs, Michelle and Jankauskas, Paul T and Phan, Thi Thu Huong and Nguyen, Viet Nga and Nguyen, Duy Thai and Duong, Thanh Tung and Nguyen, Thi Hoa and Bond, Kyle B} } @article {1148, title = {Open-source, small-animal magnetic resonance-guided focused ultrasound system.}, journal = {J Ther Ultrasound}, volume = {4}, year = {2016}, month = {2016}, pages = {22}, abstract = {

BACKGROUND: MR-guided focused ultrasound or high-intensity focused ultrasound (MRgFUS/MRgHIFU) is a non-invasive therapeutic modality with many potential applications in areas such as cancer therapy, drug delivery, and blood-brain barrier opening. However, the large financial costs involved in developing preclinical MRgFUS systems represent a barrier to research groups interested in developing new techniques and applications. We aim to mitigate these challenges by detailing a validated, open-source preclinical MRgFUS system capable of delivering thermal and mechanical FUS in a quantifiable and repeatable manner under real-time MRI guidance.

METHODS: A hardware and software package was developed that includes closed-loop feedback controlled thermometry code and CAD drawings for a therapy table designed for a preclinical MRI scanner. For thermal treatments, the modular software uses a proportional integral derivative controller to maintain a precise focal temperature rise in the target given input from MR phase images obtained concurrently. The software computes the required voltage output and transmits it to a FUS transducer that is embedded in the delivery table within the magnet bore. The delivery table holds the FUS transducer, a small animal and its monitoring equipment, and a transmit/receive RF coil. The transducer is coupled to the animal via a water bath and is translatable in two dimensions from outside the magnet. The transducer is driven by a waveform generator and amplifier controlled by real-time software in Matlab. MR acoustic radiation force imaging is also implemented to confirm the position of the focus for mechanical and thermal treatments.
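The following minimal sketch illustrates the kind of proportional-integral-derivative temperature loop described, converting the error between the target and the MR-thermometry temperature into a transducer drive voltage. It is not the authors' MATLAB code; the gains, limits, and toy heating model are invented for illustration.

```python
# Minimal PID loop sketch for focal temperature control; all values hypothetical.
class PID:
    def __init__(self, kp, ki, kd, dt, v_max):
        self.kp, self.ki, self.kd, self.dt, self.v_max = kp, ki, kd, dt, v_max
        self.integral = 0.0
        self.prev_error = 0.0

    def update(self, target_temp, measured_temp):
        error = target_temp - measured_temp
        self.integral += error * self.dt
        derivative = (error - self.prev_error) / self.dt
        self.prev_error = error
        voltage = self.kp * error + self.ki * self.integral + self.kd * derivative
        return min(max(voltage, 0.0), self.v_max)   # clamp to amplifier range

pid = PID(kp=2.0, ki=0.5, kd=0.1, dt=1.0, v_max=10.0)
temp = 37.0
for _ in range(10):
    drive = pid.update(target_temp=42.0, measured_temp=temp)
    temp += 0.2 * drive - 0.05 * (temp - 37.0)   # toy first-order heating model
```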

RESULTS: The system was validated in tissue-mimicking phantoms and in vivo during murine tumor hyperthermia treatments. Sonications were successfully controlled over a range of temperatures and thermal doses for up to 20 min with minimal temperature overshoot. MR thermometry was validated with an optical temperature probe, and focus visualization was achieved with acoustic radiation force imaging.

CONCLUSIONS: We developed an MRgFUS platform for small-animal treatments that robustly delivers accurate, precise, and controllable sonications over extended time periods. The system is open source and could increase the availability of low-cost small-animal systems to interdisciplinary researchers seeking to develop new MRgFUS applications and technology.

}, issn = {2050-5736}, doi = {10.1186/s40349-016-0066-7}, author = {Poorman, Megan E and Chaplin, Vandiver L and Wilkens, Ken and Dockery, Mary D and Giorgio, Todd D and Grissom, William A and Caskey, Charles F} } @article {1164, title = {Process control charts in infection prevention: {Make} it simple to make it happen}, journal = {American Journal of Infection Control}, year = {2016}, abstract = {BACKGROUND: Quality improvement is central to Infection Prevention and Control (IPC) programs. Challenges may occur when applying quality improvement methodologies like process control charts, often due to the limited exposure of typical IPs. Because of this, our team created an open-source database with a process control chart generator for IPC programs. The objectives of this report are to outline the development of the application and demonstrate application using simulated data. METHODS: We used Research Electronic Data Capture (REDCap Consortium, Vanderbilt University, Nashville, TN), R (R Foundation for Statistical Computing, Vienna, Austria), and R Studio Shiny (R Foundation for Statistical Computing) to create an open source data collection system with automated process control chart generation. We used simulated data to test and visualize both in-control and out-of-control processes for commonly used metrics in IPC programs. RESULTS: The R code for implementing the control charts and Shiny application can be found on our Web site (https://github.com/ul-research-support/spcapp). Screen captures of the workflow and simulated data indicating both common cause and special cause variation are provided. CONCLUSIONS: Process control charts can be easily developed based on individual facility needs using freely available software. Through providing our work free to all interested parties, we hope that others will be able to harness the power and ease of use of the application for improving the quality of care and patient safety in their facilities.}, keywords = {Health care-associated infection, Quality Improvement, Surveillance}, issn = {1527-3296}, doi = {10.1016/j.ajic.2016.09.021}, author = {Wiemken, Timothy L. and Furmanek, Stephen P. and Carrico, Ruth M. and Mattingly, William A. and Persaud, Annuradha K. and Guinn, Brian E. and Kelley, Robert R. and Ramirez, Julio A.} } @article {1161, title = {Reliability of infarct volumetry: {Its} relevance and the improvement by a software-assisted approach}, journal = {Journal of Cerebral Blood Flow and Metabolism: Official Journal of the International Society of Cerebral Blood Flow and Metabolism}, year = {2016}, abstract = {Despite the efficacy of neuroprotective approaches in animal models of stroke, their translation has so far failed from bench to bedside. One reason is presumed to be a low quality of preclinical study design, leading to bias and a low a priori power. In this study, we propose that the key read-out of experimental stroke studies, the volume of the ischemic damage as commonly measured by free-handed planimetry of TTC-stained brain sections, is subject to an unrecognized low inter-rater and test-retest reliability with strong implications for statistical power and bias. As an alternative approach, we suggest a simple, open-source, software-assisted method, taking advantage of automatic-thresholding techniques. The validity and the improvement of reliability by an automated method to tMCAO infarct volumetry are demonstrated. 
In addition, we show the probable consequences of increased reliability for precision, p-values, effect inflation, and power calculation, exemplified by a systematic analysis of experimental stroke studies published in the year 2015. Our study reveals an underappreciated quality problem in translational stroke research and suggests that software-assisted infarct volumetry might help to improve reproducibility and therefore the robustness of bench to bedside translation.}, keywords = {experimental stroke, Image analysis, middle cerebral artery occlusion, Neuroprotection, power}, issn = {1559-7016}, doi = {10.1177/0271678X16681311}, author = {Friedl{\"a}nder, Felix and Bohmann, Ferdinand and Brunkhorst, Max and Chae, Ju-Hee and Devraj, Kavi and K{\"o}hler, Yvette and Kraft, Peter and Kuhn, Hannah and Lucaciu, Alexandra and Luger, Sebastian and Pfeilschifter, Waltraud and Sadler, Rebecca and Liesz, Arthur and Scholtyschik, Karolina and Stolz, Leonie and Vutukuri, Rajkumar and Brunkhorst, Robert} } @article {1112, title = {TACIT: An open-source text analysis, crawling, and interpretation tool.}, journal = {Behav Res Methods}, year = {2016}, month = {2016 Mar 4}, abstract = {

As human activity and interaction increasingly take place online, the digital residues of these activities provide a valuable window into a range of psychological and social processes. A great deal of progress has been made toward utilizing these opportunities; however, the complexity of managing and analyzing the quantities of data currently available has limited both the types of analysis used and the number of researchers able to make use of these data. Although fields such as computer science have developed a range of techniques and methods for handling these difficulties, making use of those tools has often required specialized knowledge and programming experience. The Text Analysis, Crawling, and Interpretation Tool (TACIT) is designed to bridge this gap by providing an intuitive tool and interface for making use of state-of-the-art methods in text analysis and large-scale data management. Furthermore, TACIT is implemented as an open, extensible, plugin-driven architecture, which will allow other researchers to extend and expand these capabilities as new methods become available.
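As a rough illustration of an extensible, plugin-driven design of this kind, the registry below lets new analyses be added without touching core code; it is a hypothetical Python analogue, not TACIT's actual plugin mechanism.

```python
# Hypothetical plugin registry sketch; names and structure are invented.
from typing import Callable, Dict

ANALYSES: Dict[str, Callable[[str], dict]] = {}

def register(name: str):
    """Decorator that files an analysis function under a plugin name."""
    def wrap(fn: Callable[[str], dict]) -> Callable[[str], dict]:
        ANALYSES[name] = fn
        return fn
    return wrap

@register("word_count")
def word_count(text: str) -> dict:
    # Trivial example analysis; real plugins could wrap crawlers,
    # dictionary-based scoring, or topic models.
    return {"tokens": len(text.split())}

print(ANALYSES["word_count"]("digital residues of online activity"))
```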

}, issn = {1554-3528}, doi = {10.3758/s13428-016-0722-4}, author = {Dehghani, Morteza and Johnson, Kate M and Garten, Justin and Boghrati, Reihane and Hoover, Joe and Balasubramanian, Vijayan and Singh, Anurag and Shankar, Yuvarani and Pulickal, Linda and Rajkumar, Aswin and Parmar, Niki Jitendra} } @article {1139, title = {VirusMapper: open-source nanoscale mapping of viral architecture through super-resolution microscopy.}, journal = {Sci Rep}, volume = {6}, year = {2016}, month = {2016}, pages = {29132}, abstract = {

The nanoscale molecular assembly of mammalian viruses during their infectious life cycle remains poorly understood. Their small dimensions, generally below the 300 nm diffraction limit of light microscopes, have limited most imaging studies to electron microscopy. The recent development of super-resolution (SR) light microscopy now allows the visualisation of viral structures at resolutions of tens of nanometers. In addition, these techniques provide the added benefit of molecule-specific labelling and the capacity to investigate viral structural dynamics using live-cell microscopy. However, there is a lack of robust analytical tools that allow for precise mapping of viral structure within the setting of infection. Here we present an open-source analytical framework that combines super-resolution imaging and na{\"\i}ve single-particle analysis to generate unbiased molecular models. This tool, VirusMapper, is a high-throughput, user-friendly, ImageJ-based software package allowing for automatic statistical mapping of conserved multi-molecular structures, such as viral substructures or intact viruses. We demonstrate the usability of VirusMapper by applying it to SIM and STED images of vaccinia virus in isolation and when engaged with host cells. VirusMapper allows for the generation of accurate, high-content, molecule-specific virion models and detection of nanoscale changes in viral architecture.
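A toy sketch of the single-particle averaging idea (align many noisy particle images to a reference, then average them into a model) follows; VirusMapper itself is an ImageJ plugin, so this NumPy/SciPy version on synthetic data is only a conceptual analogue.

```python
# Conceptual single-particle averaging sketch; data and logic are simplified.
import numpy as np
from scipy.signal import fftconvolve

def align_to_reference(img, ref):
    # Cross-correlate, then shift the image so its peak matches the reference.
    corr = fftconvolve(img, ref[::-1, ::-1], mode="same")
    dy, dx = np.unravel_index(np.argmax(corr), corr.shape)
    cy, cx = np.array(corr.shape) // 2
    return np.roll(np.roll(img, cy - dy, axis=0), cx - dx, axis=1)

rng = np.random.default_rng(0)
ref = np.zeros((64, 64)); ref[28:36, 24:40] = 1.0   # toy virion template
particles = [np.roll(ref, rng.integers(-5, 6), axis=1)
             + 0.3 * rng.normal(size=ref.shape) for _ in range(50)]
model = np.mean([align_to_reference(p, ref) for p in particles], axis=0)
```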

}, issn = {2045-2322}, doi = {10.1038/srep29132}, author = {Gray, Robert D M and Beerli, Corina and Pereira, Pedro Matos and Scherer, Kathrin Maria and Samolej, Jerzy and Bleck, Christopher Karl Ernst and Mercer, Jason and Henriques, Ricardo} } @article {1132, title = {Web-based GIS for spatial pattern detection: application to malaria incidence in Vietnam.}, journal = {Springerplus}, volume = {5}, year = {2016}, month = {2016}, pages = {1014}, abstract = {

INTRODUCTION: There is great concern about how to build an interoperable health information system linking public health and health information technology within the development of public information and health surveillance programmes. Technically, some major issues remain regarding health data visualization, spatial processing of health data, health information dissemination, data sharing and the access of local communities to health information. In combination with GIS, we propose a technical framework for web-based health data visualization and spatial analysis.

METHODS: Data were collected from open map servers and geocoded with the Open Data Kit package and data geocoding tools. The web-based system is designed on open-source frameworks and libraries. The system provides web-based analysis tools for pattern detection through three spatial tests: nearest neighbour analysis, the K function, and spatial autocorrelation.
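For illustration, the sketch below implements one of the three named tests, a Clark-Evans-style nearest neighbour index, on synthetic point data; it is not the system's code, and the study area and case locations are invented. R below 1 suggests clustering of cases, R near 1 a random pattern.

```python
# Nearest neighbour index sketch on invented case locations.
import numpy as np
from scipy.spatial import cKDTree

def nearest_neighbour_index(points: np.ndarray, area: float) -> float:
    tree = cKDTree(points)
    d, _ = tree.query(points, k=2)                # k=2: nearest besides self
    observed = d[:, 1].mean()
    expected = 0.5 / np.sqrt(len(points) / area)  # expectation under randomness
    return observed / expected

rng = np.random.default_rng(1)
cases = rng.uniform(0, 100, size=(200, 2))        # synthetic coordinates (km)
print(f"R = {nearest_neighbour_index(cases, area=100 * 100):.2f}")
```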

RESULTS: The result is a web-based GIS through which end users can detect disease patterns by selecting an area and spatial test parameters, and can communicate findings to managers and decision makers. The end users can be health practitioners, educators, local communities, health sector authorities and decision makers. This web-based system allows for the improvement of health-related services to public sector users as well as citizens in a secure manner.

CONCLUSIONS: The combination of spatial statistics and web-based GIS can be a solution that helps empower health practitioners in direct and specific intersectoral actions, thus providing for better analysis, control and decision-making.

}, issn = {2193-1801}, doi = {10.1186/s40064-016-2518-5}, author = {Bui, Thanh Quang and Pham, Hai Minh} } @article {978, title = {DICOM for Clinical Research: PACS-Integrated Electronic Data Capture in Multi-Center Trials.}, journal = {J Digit Imaging}, year = {2015}, month = {2015 May 23}, abstract = {

Providing surrogate endpoints in clinical trials, medical imaging has become increasingly important in human-centered research. Nowadays, electronic data capture systems (EDCS) are used, but binary image data are integrated insufficiently. There exists no structured way either to manage digital imaging and communications in medicine (DICOM) data in EDCS or to interconnect EDCS with picture archiving and communication systems (PACS). Manual detours in the trial workflow yield errors, delays, and costs. In this paper, requirements for a DICOM-based system interconnection of EDCS and research PACS are analysed. Several workflow architectures are compared. Optimized for multi-center trials, we propose an entirely web-based solution integrating EDCS, PACS, and DICOM viewer, which has been implemented using the open source projects OpenClinica, DCM4CHEE, and Weasis, respectively. The EDCS forms the primary access point. EDCS-to-PACS interchange is integrated seamlessly on the data and context levels. DICOM data is viewed directly from the electronic case report form (eCRF), while PACS-based management is hidden from the user. Data privacy is ensured by automatic de-identification and re-labelling with study identifiers. Our concept is evaluated on 13 DICOM modalities and transfer syntaxes. We have implemented the system in an ongoing investigator-initiated trial (IIT), in which five centers have recruited 24 patients so far, performing decentralized computed tomography (CT) screening. Using our system, the chief radiologist reads DICOM data directly from the eCRF. Errors and workflow processing time are reduced. Furthermore, an imaging database is built that may support future research.
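A minimal sketch of the de-identification and re-labelling step, using the pydicom library on a hypothetical file, might look as follows; the tag list is illustrative and far short of a complete de-identification profile.

```python
# De-identification sketch; file name, study ID, and tag list are illustrative.
import pydicom

def deidentify(path: str, study_id: str) -> pydicom.Dataset:
    ds = pydicom.dcmread(path)
    ds.PatientName = study_id        # re-label with pseudonymous study ID
    ds.PatientID = study_id
    for tag in ("PatientBirthDate", "PatientAddress", "OtherPatientIDs"):
        if tag in ds:                # drop direct identifiers if present
            delattr(ds, tag)
    return ds

ds = deidentify("ct_slice.dcm", study_id="TRIAL01-0042")
ds.save_as("ct_slice_deid.dcm")
```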

}, issn = {1618-727X}, doi = {10.1007/s10278-015-9802-8}, author = {Haak, Daniel and Page, Charles-E and Reinartz, Sebastian and Kr{\"u}ger, Thilo and Deserno, Thomas M} } @article {997, title = {Innovating to enhance clinical data management using non-commercial and open source solutions across a multi-center network supporting inpatient pediatric care and research in Kenya.}, journal = {J Am Med Inform Assoc}, year = {2015}, month = {2015 Jun 10}, abstract = {

OBJECTIVE: To share approaches and innovations adopted to deliver a relatively inexpensive clinical data management (CDM) framework within a low-income setting that aims to deliver quality pediatric data useful for supporting research, strengthening the information culture and informing improvement efforts in local clinical practice.

MATERIALS AND METHODS: The authors implemented a CDM framework to support a Clinical Information Network (CIN) using Research Electronic Data Capture (REDCap), a noncommercial software solution designed for rapid development and deployment of electronic data capture tools. It was used for collection of standardized data from case records of multiple hospitals{\textquoteright} pediatric wards. R, an open-source statistical language, was used for data quality enhancement, analysis, and report generation for the hospitals.
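For illustration, record data of this kind can be pulled through REDCap's API as sketched below; the authors' tooling used R, so this Python client, with placeholder URL, token, and instrument name, is only an analogue.

```python
# REDCap record export sketch; URL, token, and form name are placeholders.
import requests

def export_records(api_url: str, token: str, form: str) -> list[dict]:
    payload = {
        "token": token,          # project-specific API token
        "content": "record",
        "format": "json",
        "forms[0]": form,        # limit export to one instrument
    }
    r = requests.post(api_url, data=payload, timeout=60)
    r.raise_for_status()
    return r.json()

records = export_records("https://redcap.example.org/api/", "SECRET", "admission")
print(len(records), "admission records exported")
```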

RESULTS: In the first year of CIN, the authors have developed innovative solutions to support the implementation of a secure, rapid pediatric data collection system spanning 14 hospital sites with stringent data quality checks. Data have been collated on over 37 000 admission episodes, with considerable improvement in clinical documentation of admissions observed. Using meta-programming techniques in R, coupled with branching logic, randomization, data lookup, and Application Programming Interface (API) features offered by REDCap, CDM tasks were configured and automated to ensure quality data was delivered for clinical improvement and research use.

CONCLUSION: A low-cost, clinically focused, but geographically dispersed quality CDM framework can be achieved and sustained in a long-term, multi-site, real-world context, and its challenges can be overcome through thoughtful design and implementation of open-source tools for handling data and supporting research.

}, issn = {1527-974X}, doi = {10.1093/jamia/ocv028}, author = {Tuti, Timothy and Bitok, Michael and Paton, Chris and Makone, Boniface and Malla, Lucas and Muinga, Naomi and Gathara, David and English, Mike} } @article {974, title = {Omics Metadata Management Software (OMMS).}, journal = {Bioinformation}, volume = {11}, year = {2015}, month = {2015}, pages = {165-72}, abstract = {Next-generation sequencing projects have underappreciated information management tasks requiring detailed attention to specimen curation, nucleic acid sample preparation and sequence production methods required for downstream data processing, comparison, interpretation, sharing and reuse. The few existing metadata management tools for genome-based studies provide weak curatorial frameworks for experimentalists to store and manage idiosyncratic, project-specific information, typically offering no automation supporting unified naming and numbering conventions for sequencing production environments that routinely deal with hundreds, if not thousands of samples at a time. Moreover, existing tools are not readily interfaced with bioinformatics executables, (e.g., BLAST, Bowtie2, custom pipelines). Our application, the Omics Metadata Management Software (OMMS), answers both needs, empowering experimentalists to generate intuitive, consistent metadata, and perform analyses and information management tasks via an intuitive web-based interface. Several use cases with short-read sequence datasets are provided to validate installation and integrated function, and suggest possible methodological road maps for prospective users. Provided examples highlight possible OMMS workflows for metadata curation, multistep analyses, and results management and downloading. The OMMS can be implemented as a stand alone-package for individual laboratories, or can be configured for webbased deployment supporting geographically-dispersed projects. The OMMS was developed using an open-source software base, is flexible, extensible and easily installed and executed. The OMMS can be obtained at http://omms.sandia.gov.

AVAILABILITY: The OMMS can be obtained at http://omms.sandia.gov.
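As a hypothetical sketch of the kind of executable integration the abstract describes, curated sample metadata could drive a BLAST invocation via subprocess; the paths, sample ID, and database name below are invented.

```python
# Metadata-driven invocation of a bioinformatics executable; values invented.
import subprocess

sample = {"id": "S0042", "reads": "S0042_contigs.fasta", "db": "refseq_bacteria"}

cmd = [
    "blastn",
    "-query", sample["reads"],
    "-db", sample["db"],
    "-outfmt", "6",                      # tabular output
    "-out", f"{sample['id']}_hits.tsv",
]
subprocess.run(cmd, check=True)          # raises if BLAST exits non-zero
```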

}, issn = {0973-2063}, doi = {10.6026/97320630011165}, author = {Perez-Arriaga, Martha O and Wilson, Susan and Williams, Kelly P and Schoeniger, Joseph and Waymire, Russel L and Powell, Amy Jo} } @article {985, title = {Open source libraries and frameworks for biological data visualisation: a guide for developers.}, journal = {Proteomics}, volume = {15}, year = {2015}, month = {2015 Apr}, pages = {1356-74}, abstract = {

Recent advances in high-throughput experimental techniques have led to an exponential increase in both the size and the complexity of the data sets commonly studied in biology. Data visualisation is increasingly used as the key to unlock this data, going from hypothesis generation to model evaluation and tool implementation. It is becoming more and more the heart of bioinformatics workflows, enabling scientists to reason and communicate more effectively. In parallel, there has been a corresponding trend towards the development of related software, which has triggered the maturation of different visualisation libraries and frameworks. For bioinformaticians, scientific programmers and software developers, the main challenge is to pick out the most fitting one(s) to create clear, meaningful and integrated data visualisation for their particular use cases. In this review, we introduce a collection of open source or free to use libraries and frameworks for creating data visualisation, covering the generation of a wide variety of charts and graphs. We will focus on software written in Java, JavaScript or Python. We truly believe this software offers the potential to turn tedious data into exciting visual stories.

}, issn = {1615-9861}, doi = {10.1002/pmic.201400377}, author = {Wang, Rui and Perez-Riverol, Yasset and Hermjakob, Henning and Vizca{\'\i}no, Juan Antonio} } @article {995, title = {OpenHELP (Heidelberg laparoscopy phantom): development of an open-source surgical evaluation and training tool.}, journal = {Surg Endosc}, year = {2015}, month = {2015 Feb 12}, abstract = {

BACKGROUND: Apart from animal testing and clinical trials, surgical research and laparoscopic training mainly rely on phantoms. The aim of this project was to design a phantom with realistic anatomy and haptic characteristics, modular design and easy reproducibility. The phantom was named open-source Heidelberg laparoscopic phantom (OpenHELP) and serves as an open-source platform.

METHODS: The phantom was based on an anonymized CT scan of a male patient. The anatomical structures were segmented to obtain digital three-dimensional models of the torso and the organs. The digital models were materialized via rapid prototyping. Two methods to simulate pneumoperitoneum were developed: a flexible one using an elastic abdominal wall, and a rigid one using a plastic shell. Artificial organ production was carried out sequentially, starting from raw gypsum models to silicone molds to final silicone casts. The reproduction accuracy was evaluated, as an example, for ten silicone rectum models by comparing each digital 3D surface with the original rectum from the CT scan and calculating the root mean square error of the surface variations. Haptic realism was also evaluated to find the most realistic silicone compositions on a visual analog scale (VAS, 0-10).
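The accuracy metric can be illustrated as below: a root mean square error over corresponding surface points, computed here on synthetic stand-ins for the registered meshes; this is not the authors' evaluation pipeline.

```python
# RMS error of surface variations; point arrays are synthetic stand-ins.
import numpy as np

def surface_rmse(original: np.ndarray, cast: np.ndarray) -> float:
    """Both arrays: (n_points, 3), assumed registered and corresponding."""
    distances = np.linalg.norm(original - cast, axis=1)
    return float(np.sqrt(np.mean(distances ** 2)))

rng = np.random.default_rng(3)
orig = rng.uniform(0, 50, size=(1000, 3))             # mm
cast = orig + rng.normal(scale=2.0, size=orig.shape)  # reproduction error
print(f"RMS error: {surface_rmse(orig, cast):.2f} mm")
```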

RESULTS: The rigid and durable plastic torso and soft silicone organs of the abdominal cavity were successfully produced. A simulation of pneumoperitoneum could be created successfully by both methods. The reproduction accuracy of ten silicone rectum models showed an average root mean square error of 2.26 (0-11.48) mm. Haptic realism revealed an average value on a VAS of 7.25 (5.2-9.6) for the most realistic rectum.

CONCLUSION: The OpenHELP phantom proved to be feasible and accurate. The phantom has since been applied frequently in the field of computer-assisted surgery at our institutions and is accessible as an open-source project at www.open-cas.org for the academic community.

}, issn = {1432-2218}, doi = {10.1007/s00464-015-4094-0}, author = {Kenngott, H G and W{\"u}nscher, J J and Wagner, M and Preukschas, A and Wekerle, A L and Neher, P and Suwelack, S and Speidel, S and Nickel, F and Oladokun, D and Maier-Hein, L and Dillmann, R and Meinzer, H P and M{\"u}ller-Stich, B P} } @article {999, title = {Role of OpenEHR as an open source solution for the regional modelling of patient data in obstetrics.}, journal = {J Biomed Inform}, volume = {55}, year = {2015}, month = {2015 Jun}, pages = {174-87}, abstract = {

This work investigates whether openEHR, with its reference model, archetypes and templates, is suitable for the digital representation of demographic as well as clinical data. Moreover, it elaborates openEHR as a tool for modelling hospital information systems on a regional level based on a national logical infrastructure. OpenEHR is a dual-model approach developed for the modelling of hospital information systems enabling semantic interoperability; a holistic solution is the use of dual-model-based electronic healthcare record systems. Modelling data in the field of obstetrics is a challenge, since different regions demand locally specific information for the process of treatment. Smaller health units in developing countries like Brazil or Malaysia, which until recently handled automatable processes like the storage of sensitive patient data in paper form, are starting organizational reconstruction processes. This archetype proof-of-concept investigation has tried out some elements of the openEHR methodology in cooperation with a health unit in Colombo, Brazil. Two legal forms provided by the Brazilian Ministry of Health were analyzed and classified into demographic and clinical data. The LinkEHR-Ed editor was used to read, edit and create archetypes. Results show that 33 clinical and demographic concepts, which are necessary to cover data demanded by the Unified National Health System, were identified. Of these concepts, 61\% were reused and 39\% modified to cover domain requirements. The detailed process of reuse, modification and creation of archetypes is shown. We conclude that, although a major part of demographic and clinical patient data were already represented by existing archetypes, a significant part required major modifications. In this study openEHR proved to be a highly suitable tool for the modelling of complex health data. In combination with the LinkEHR-Ed software it offers user-friendly and highly applicable tools, although the complexity created by the vast specifications requires expert networks to define generally accepted clinical models. Finally, this project has pointed out main benefits, including high coverage of obstetrics data on the Clinical Knowledge Manager, simple modelling, and a wide network and support using openEHR. The barriers described include the allocation of clinical content to respective archetypes, as well as slow adoption of changes on the Clinical Knowledge Manager, leading to redundant efforts in data contribution, which need to be addressed in future work.

}, issn = {1532-0480}, doi = {10.1016/j.jbi.2015.04.004}, author = {Pahl, Christina and Zare, Mojtaba and Nilashi, Mehrbakhsh and de Faria Borges, Marco Aur{\'e}lio and Weingaertner, Daniel and Detschew, Vesselin and Supriyanto, Eko and Ibrahim, Othman} } @article {986, title = {Taking advantage of continuity of care documents to populate a research repository.}, journal = {J Am Med Inform Assoc}, volume = {22}, year = {2015}, month = {2015 Mar}, pages = {370-9}, abstract = {

OBJECTIVE: Clinical data warehouses have accelerated clinical research, but even with available open source tools, there is a high barrier to entry due to the complexity of normalizing and importing data. The Office of the National Coordinator for Health Information Technology{\textquoteright}s Meaningful Use Incentive Program now requires that electronic health record systems produce standardized consolidated clinical document architecture (C-CDA) documents. Here, we leverage this data source to create a low-volume, standards-based import pipeline for the Informatics for Integrating Biology and the Bedside (i2b2) clinical research platform. We validate this approach by automatically creating a small repository at Partners Healthcare from C-CDA documents.

MATERIALS AND METHODS: We designed an i2b2 extension to import C-CDAs into i2b2. It is extensible to other sites with variances in C-CDA format without requiring custom code. We also designed new ontology structures for querying the imported data.
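A minimal sketch of extracting coded problem entries from a C-CDA document with XPath follows; the file name is a placeholder, and a real importer must tolerate the structural variance the authors address with configurable mappings rather than hard-coded paths.

```python
# C-CDA problem extraction sketch; file name is a placeholder.
from lxml import etree

NS = {"cda": "urn:hl7-org:v3"}
doc = etree.parse("patient_ccda.xml")

# 2.16.840.1.113883.10.20.22.4.4 is the C-CDA Problem Observation template.
for obs in doc.xpath(
    '//cda:observation[cda:templateId/@root="2.16.840.1.113883.10.20.22.4.4"]',
    namespaces=NS,
):
    code = obs.find("cda:value", NS)
    if code is not None:
        print(code.get("code"), code.get("displayName"))
```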

RESULTS: We implemented our methodology at Partners Healthcare, where we developed an adapter to retrieve C-CDAs from Enterprise Services. Our current implementation supports demographics, encounters, problems, and medications. We imported approximately 17 000 clinical observations on 145 patients into i2b2 in about 24 min. We were able to perform i2b2 cohort finding queries and view patient information through SMART apps on the imported data.

DISCUSSION: This low volume import approach can serve small practices with local access to C-CDAs and will allow patient registries to import patient supplied C-CDAs. These components will soon be available open source on the i2b2 wiki.

CONCLUSIONS: Our approach will lower barriers to entry in implementing i2b2 where informatics expertise or data access are limited.

}, keywords = {Biomedical Research, Continuity of Patient Care, Database Management Systems, Databases as Topic, Humans, Information Storage and Retrieval, Meaningful Use, Systems Integration}, issn = {1527-974X}, doi = {10.1136/amiajnl-2014-003040}, author = {Klann, Jeffrey G and Mendis, Michael and Phillips, Lori C and Goodson, Alyssa P and Rocha, Beatriz H and Goldberg, Howard S and Wattanasin, Nich and Murphy, Shawn N} } @article {980, title = {Virtualization of Open-Source Secure Web Services to Support Data Exchange in a Pediatric Critical Care Research Network.}, journal = {J Am Med Inform Assoc}, year = {2015}, month = {2015 Mar 21}, abstract = {

OBJECTIVES: To examine the feasibility of deploying a virtual web service for sharing data within a research network, and to evaluate the impact on data consistency and quality.

MATERIAL AND METHODS: Virtual machines (VMs) encapsulated an open-source, semantically and syntactically interoperable secure web service infrastructure along with a shadow database. The VMs were deployed to 8 Collaborative Pediatric Critical Care Research Network Clinical Centers.

RESULTS: Virtual web services could be deployed in hours. The interoperability of the web services reduced format misalignment from 56\% to 1\% and demonstrated that 99\% of the data transferred consistently using the data dictionary, while 1\% needed human curation.

CONCLUSIONS: Use of virtualized open-source secure web service technology could enable direct electronic abstraction of data from hospital databases for research purposes.

}, issn = {1527-974X}, doi = {10.1093/jamia/ocv009}, author = {Frey, Lewis J and Sward, Katherine A and Newth, Christopher Jl and Khemani, Robinder G and Cryer, Martin E and Thelen, Julie L and Enriquez, Rene and Shaoyu, Su and Pollack, Murray M and Harrison, Rick E and Meert, Kathleen L and Berg, Robert A and Wessel, David L and Shanley, Thomas P and Dalton, Heidi and Carcillo, Joseph and Jenkins, Tammara L and Dean, J Michael} } @article {945, title = {ExpertEyes: Open-source, high-definition eyetracking.}, journal = {Behav Res Methods}, year = {2014}, month = {2014 Jun 17}, abstract = {

ExpertEyes is a low-cost, open-source package of hardware and software that is designed to provide portable high-definition eyetracking. The project involves several technological innovations, including portability, high-definition video recording, and multiplatform software support. It was designed for challenging recording environments, and all processing is done offline to allow for optimization of parameter estimation. The pupil and corneal reflection are estimated using a novel forward eye model that simultaneously fits both the pupil and the corneal reflection with full ellipses, addressing a common situation in which the corneal reflection sits at the edge of the pupil and therefore breaks the contour of the ellipse. The accuracy and precision of the system are comparable to or better than what is available in commercial eyetracking systems, with a typical accuracy of less than 0.4{\textdegree} and best accuracy below 0.3{\textdegree}, and with a typical precision (SD method) around 0.3{\textdegree} and best precision below 0.2{\textdegree}. Part of the success of the system comes from a high-resolution eye image. The high image quality results from uncasing common digital camcorders and recording directly to SD cards, which avoids the limitations of the analog NTSC format. The software is freely downloadable, and complete hardware plans are available, along with sources for custom parts.
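For orientation, the sketch below shows the basic operation of fitting a full ellipse to a detected pupil contour with OpenCV; it is a generic illustration on a hypothetical frame, not ExpertEyes' forward eye model, which jointly fits the pupil and the corneal reflection.

```python
# Generic pupil ellipse-fitting sketch; image file and threshold are invented.
import cv2

eye = cv2.imread("eye_frame.png", cv2.IMREAD_GRAYSCALE)
_, mask = cv2.threshold(eye, 60, 255, cv2.THRESH_BINARY_INV)   # dark pupil
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
pupil = max(contours, key=cv2.contourArea)                     # largest blob
if len(pupil) >= 5:                                            # fitEllipse needs >= 5 points
    (cx, cy), (major, minor), angle = cv2.fitEllipse(pupil)
    print(f"pupil centre: ({cx:.1f}, {cy:.1f}), axes: {major:.1f} x {minor:.1f}")
```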

}, issn = {1554-3528}, doi = {10.3758/s13428-014-0465-z}, author = {Parada, Francisco J and Wyatte, Dean and Yu, Chen and Akavipat, Ruj and Emerick, Brandi and Busey, Thomas} } @article {1051, title = {High dimensional biological data retrieval optimization with NoSQL technology.}, journal = {BMC Genomics}, volume = {15 Suppl 8}, year = {2014}, month = {2014}, pages = {S3}, abstract = {

BACKGROUND: High-throughput transcriptomic data generated by microarray experiments is the most abundant and frequently stored kind of data currently used in translational medicine studies. Although microarray data is supported in data warehouses such as tranSMART, queries over hundreds of different patient gene expression records in relational databases are slow due to poor performance. Non-relational data models, such as the key-value model implemented in NoSQL databases, hold promise as more performant solutions. Our motivation is to improve the performance of the tranSMART data warehouse with a view to supporting Next Generation Sequencing data.

RESULTS: In this paper we introduce a new data model better suited for high-dimensional data storage and querying, optimized for database scalability and performance. We have designed a key-value pair data model to support faster queries over large-scale microarray data and implemented the model using HBase, an implementation of Google{\textquoteright}s BigTable storage system. An experimental performance comparison was carried out against the traditional relational data model implemented in both MySQL Cluster and MongoDB, using a large publicly available transcriptomic data set taken from NCBI GEO concerning Multiple Myeloma. Our new key-value data model implemented on HBase exhibits an average 5.24-fold increase in high-dimensional biological data query performance compared to the relational model implemented on MySQL Cluster, and an average 6.47-fold increase on query performance on MongoDB.
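The composite row-key idea can be sketched as follows; the schema details are illustrative rather than the paper's exact design, and a plain dictionary stands in for the HBase table.

```python
# Composite row-key sketch: one patient's expression profile is a prefix scan.
store: dict[bytes, float] = {}   # stands in for an HBase table

def put_expression(patient: str, probe: str, value: float) -> None:
    row_key = f"{patient}:{probe}".encode()   # composite key: patient + probe
    store[row_key] = value

def scan_patient(patient: str) -> dict[bytes, float]:
    prefix = f"{patient}:".encode()
    return {k: v for k, v in store.items() if k.startswith(prefix)}

put_expression("PT0001", "203508_at", 7.91)   # invented probe IDs and values
put_expression("PT0001", "204105_at", 5.32)
print(scan_patient("PT0001"))
```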

CONCLUSIONS: The performance evaluation found that the new key-value data model, in particular its implementation in HBase, outperforms the relational model currently implemented in tranSMART. We propose that NoSQL technology holds great promise for large-scale data management, in particular for high-dimensional biological data such as that demonstrated in the performance evaluation described in this paper. We aim to use this new data model as a basis for migrating tranSMART{\textquoteright}s implementation to a more scalable solution for Big Data.

}, keywords = {Database Management Systems, Databases, Genetic, High-Throughput Nucleotide Sequencing, Humans, Information Storage and Retrieval, Medical Informatics, Multiple Myeloma, Oligonucleotide Array Sequence Analysis, Transcriptome}, issn = {1471-2164}, doi = {10.1186/1471-2164-15-S8-S3}, author = {Wang, Shicai and Pandis, Ioannis and Wu, Chao and He, Sijin and Johnson, David and Emam, Ibrahim and Guitton, Florian and Guo, Yike} } @article {1033, title = {Making cytological diagnoses on digital images using the iPath network.}, journal = {Acta Cytol}, volume = {58}, year = {2014}, month = {2014}, pages = {453-60}, abstract = {

BACKGROUND: The iPath telemedicine platform Basel is mainly used for histological and cytological consultations, but also serves as a valuable learning tool.

AIM: To study the level of accuracy in making diagnoses based on still images achieved by experienced cytopathologists, to identify limiting factors, and to provide a cytological image series as a learning set.

METHOD: Images from 167 consecutive cytological specimens of different origin were uploaded to the iPath platform and evaluated by four cytopathologists. Only wet-fixed and well-stained specimens were used. The consultants made specific diagnoses and categorized each as benign, suspicious or malignant.

RESULTS: For all consultants, specificity and sensitivity regarding categorized diagnoses were 83-92\% and 85-93\%, respectively; the overall accuracy was 88-90\%. The interobserver agreement was substantial (κ = 0.791). The lowest rate of concordance was achieved in urine and bladder washings and in the identification of benign lesions.
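For illustration, an agreement statistic of this kind can be computed as below; the sketch uses scikit-learn's Cohen's kappa on made-up ratings for two raters, whereas the study reports agreement across four consultants.

```python
# Pairwise Cohen's kappa sketch on invented benign/suspicious/malignant labels.
from sklearn.metrics import cohen_kappa_score

rater_a = ["benign", "malignant", "suspicious", "benign", "malignant", "benign"]
rater_b = ["benign", "malignant", "benign", "benign", "malignant", "suspicious"]
print(f"kappa = {cohen_kappa_score(rater_a, rater_b):.3f}")
```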

CONCLUSION: Using a digital image set for diagnostic purposes implies that even under optimal conditions the accuracy rate will not exceed 80-90\%, mainly because of the lack of supportive immunocytochemical or molecular tests. This limitation does not disqualify digital images for teleconsulting or as a learning aid. The series of images used for the study is open to the public at http://pathorama.wordpress.com/extragenital-cytology-2013/.

}, keywords = {Adolescent, Adult, Aged, Aged, 80 and over, Child, Child, Preschool, Computers, Handheld, Cytodiagnosis, Diagnosis, Differential, Female, Humans, Hyperplasia, Infant, Male, Metaplasia, Middle Aged, Neoplasms, Observer Variation, Reproducibility of Results, Sensitivity and Specificity, Telemedicine}, issn = {0001-5547}, doi = {10.1159/000369241}, author = {Dalquen, Peter and Savic Prince, Spasenija and Spieler, Peter and Kunze, Dietmar and Neumann, Heinrich and Eppenberger-Castori, Serenella and Adams, Heiner and Glatz, Katharina and Bubendorf, Lukas} } @article {931, title = {Managing multicentre clinical trials with open source.}, journal = {Inform Health Soc Care}, volume = {39}, year = {2014}, month = {2014 Mar}, pages = {67-80}, abstract = {

Background: Multicentre clinical trials are challenged by a high administrative burden, data management pitfalls and costs. This leads to reduced enthusiasm and commitment of the physicians involved and thus to a reluctance to conduct multicentre clinical trials. Objective: The purpose of this study was to develop a web-based open source platform to support multi-centre clinical trials. Methods: Using the design science research approach, we developed a web-based, multi-centre clinical trial management system on Drupal, an open source software distributed under the terms of the General Public License. Results: The system was evaluated by user testing, has well supported several completed and ongoing clinical trials, and is available for free download. Conclusion: Open source clinical trial management systems are capable of supporting multi-centre clinical trials by enhancing efficiency, quality of data management and collaboration.

}, issn = {1753-8165}, doi = {10.3109/17538157.2013.812647}, author = {Raptis, Dimitri Aristotle and Mettler, Tobias and Fischer, Michael Alexander and Patak, Michael and Lesurtel, Mickael and Eshmuminov, Dilmurodjon and de Rougemont, Olivier and Graf, Rolf and Clavien, Pierre-Alain and Breitenstein, Stefan} } @article {943, title = {Open-source electronic data capture system offered increased accuracy and cost-effectiveness compared with paper methods in Africa.}, journal = {J Clin Epidemiol}, year = {2014}, month = {2014 Aug 15}, abstract = {

OBJECTIVES: Existing electronic data capture options are often financially unfeasible in resource-poor settings or difficult to support technically in the field. To help facilitate large-scale multicenter studies in sub-Saharan Africa, the African Partnership for Chronic Disease Research (APCDR) has developed an open-source electronic questionnaire (EQ).

STUDY DESIGN AND SETTING: To assess its relative validity, we compared the EQ against traditional pen-and-paper methods using 200 randomized interviews conducted in an ongoing type 2 diabetes case-control study in South Africa.

RESULTS: During its 3-month validation, the EQ had a lower frequency of errors (EQ, 0.17 errors per 100 questions; paper, 0.73 errors per 100 questions; P-value ≤ 0.001) and a lower monetary cost per correctly entered question compared with the pen-and-paper method. We found no marked difference in the average duration of the interview between methods (EQ, 5.4~minutes; paper, 5.6~minutes).

CONCLUSION: This validation study suggests that the EQ may offer increased accuracy, similar interview duration, and increased cost-effectiveness compared with paper-based data collection methods. The APCDR EQ software is freely available (https://github.com/apcdr/questionnaire).

}, issn = {1878-5921}, doi = {10.1016/j.jclinepi.2014.06.012}, author = {Dillon, David G and Pirie, Fraser and Pomilla, Cristina and Sandhu, Manjinder S and Motala, Ayesha A and Young, Elizabeth H} } @article {1027, title = {Possible combinations of electronic data capture and randomization systems. principles and the realization with RANDI2 and OpenClinica.}, journal = {Methods Inf Med}, volume = {53}, year = {2014}, month = {2014}, pages = {202-7}, abstract = {

BACKGROUND: Clinical trials (CT) are, in a broad sense, experiments to prove and establish the clinical benefit of treatments. Nowadays, electronic data capture systems (EDCS) are used more often, bringing better data management and higher data quality into clinical practice. Electronic randomization systems are also used to assign patients to treatments.

OBJECTIVES: When both a randomization system (RS) and an EDCS are used, identical data may be collected in both, especially with stratified randomization. This separate data storage may lead to data inconsistency, and in general the data sets have to be aligned. This article discusses solutions for combining RS and EDCS; one approach is realized and introduced in detail.

METHODS: Different possible ways of combining EDCS and RS are determined, and the pros and cons of each solution are worked out. For the combination of two independent applications, the necessary communication interfaces are defined, taking existing standards into account. An example realization is implemented using open-source applications and state-of-the-art software development procedures.

RESULTS: Three possibilities for separate usage or combination of EDCS and RS are presented and assessed: i) completely independent usage of both systems; ii) realization of one system with both functions; and iii) two separate systems that communicate via defined interfaces. In addition, a realization of our preferred approach, the combination of both systems via defined interfaces, is introduced using the open source tools RANDI2 and OpenClinica.
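Option iii) can be sketched as the EDCS calling a separate randomization service over a defined HTTP interface, as below; the endpoint, fields, and response shape are invented and do not reflect the actual RANDI2/OpenClinica interface.

```python
# Hypothetical EDCS-to-RS interface sketch; endpoint and fields are invented.
import requests

def randomize(base_url: str, trial: str, patient_id: str, strata: dict) -> str:
    r = requests.post(
        f"{base_url}/trials/{trial}/randomizations",
        json={"patient": patient_id, "strata": strata},  # stratification factors
        timeout=30,
    )
    r.raise_for_status()
    return r.json()["arm"]            # the EDCS stores the assigned arm

arm = randomize("https://rs.example.org", "TRIAL-01", "P-017",
                {"site": "A", "sex": "f"})
```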

CONCLUSION: The advantage of flexible, independent development of EDCS and RS is shown, based on the fact that these tools have very different feature sets. In our opinion, the combination of both systems via defined interfaces fulfills the requirements of randomization and electronic data capture and is feasible in practice. In addition, such a setting can reduce training costs and avoid error-prone duplicate data entry.

}, keywords = {Automatic Data Processing, Computer Communication Networks, Humans, Medical Informatics Computing, Medical Records Systems, Computerized, Random Allocation, Randomized Controlled Trials as Topic, Software Design}, issn = {0026-1270}, doi = {10.3414/ME13-01-0074}, author = {Schrimpf, D and Haag, M and Pilz, L R} } @article {1001, title = {tranSMART: An Open Source Knowledge Management and High Content Data Analytics Platform.}, journal = {AMIA Jt Summits Transl Sci Proc}, volume = {2014}, year = {2014}, month = {2014}, pages = {96-101}, abstract = {

The tranSMART knowledge management and high-content analysis platform is a flexible software framework featuring novel research capabilities. It enables analysis of integrated data for the purposes of hypothesis generation, hypothesis validation, and cohort discovery in translational research. tranSMART bridges the prolific world of basic science and clinical practice data at the point of care by merging multiple types of data from disparate sources into a common environment. The application supports data harmonization and integration with analytical pipelines. The application code was released into the open source community in January 2012, with 32 instances in operation. tranSMART{\textquoteright}s extensible data model and corresponding data integration processes, rapid data analysis features, and open source nature make it an indispensable tool in translational or clinical research.

}, issn = {2153-4063}, author = {Scheufele, Elisabeth and Aronzon, Dina and Coopersmith, Robert and McDuffie, Michael T and Kapoor, Manish and Uhrich, Christopher A and Avitabile, Jean E and Liu, Jinlei and Housman, Dan and Palchuk, Matvey B} } @article {865, title = {GBM volumetry using the 3D Slicer medical image computing platform.}, journal = {Sci Rep}, volume = {3}, year = {2013}, month = {2013}, pages = {1364}, abstract = {

Volumetric change in glioblastoma multiforme (GBM) over time is a critical factor in treatment decisions. Typically, the tumor volume is computed on a slice-by-slice basis using MRI scans obtained at regular intervals. (3D)Slicer - a free platform for biomedical research - provides an alternative to this manual slice-by-slice segmentation process that is significantly faster and requires less user interaction. In this study, 4 physicians segmented GBMs in 10 patients, once using the competitive region-growing based GrowCut segmentation module of Slicer, and once purely by drawing boundaries completely manually on a slice-by-slice basis. Furthermore, we provide a variability analysis for three physicians for 12 GBMs. The time required for GrowCut segmentation was on average 61\% of the time required for a purely manual segmentation. A comparison of Slicer-based segmentation with manual slice-by-slice segmentation resulted in a Dice Similarity Coefficient of 88.43 {\textpm} 5.23\% and a Hausdorff Distance of 2.32 {\textpm} 5.23 mm.
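The two agreement metrics reported can be computed as sketched below on synthetic binary masks; this is an illustration with NumPy/SciPy, not the study's evaluation code.

```python
# Dice coefficient and symmetric Hausdorff distance on synthetic masks.
import numpy as np
from scipy.spatial.distance import directed_hausdorff

def dice(a: np.ndarray, b: np.ndarray) -> float:
    return 2.0 * np.logical_and(a, b).sum() / (a.sum() + b.sum())

rng = np.random.default_rng(7)
seg_a = rng.random((64, 64)) > 0.5              # stand-ins for tumor masks
seg_b = seg_a.copy(); seg_b[:2] = ~seg_b[:2]    # perturb a few rows

pts_a = np.argwhere(seg_a); pts_b = np.argwhere(seg_b)
hd = max(directed_hausdorff(pts_a, pts_b)[0], directed_hausdorff(pts_b, pts_a)[0])
print(f"Dice = {dice(seg_a, seg_b):.3f}, Hausdorff = {hd:.1f} px")
```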

}, issn = {2045-2322}, doi = {10.1038/srep01364}, author = {Egger, Jan and Kapur, Tina and Fedorov, Andriy and Pieper, Steve and Miller, James V and Veeraraghavan, Harini and Freisleben, Bernd and Golby, Alexandra J and Nimsky, Christopher and Kikinis, Ron} } @article {895, title = {Revolutionizing patient control of health information.}, journal = {Can Fam Physician}, volume = {59}, year = {2013}, month = {2013 Aug}, pages = {823-4}, issn = {1715-5258}, author = {Chan, David and Howard, Michelle and Dolovich, Lisa and Bartlett, Gillian and Price, David} } @article {795, title = {BioImageXD: an open, general-purpose and high-throughput image-processing platform.}, journal = {Nat Methods}, volume = {9}, year = {2012}, month = {2012}, pages = {683-9}, abstract = {BioImageXD puts open-source computer science tools for three-dimensional visualization and analysis into the hands of all researchers, through a user-friendly graphical interface tuned to the needs of biologists. BioImageXD has no restrictive licenses or undisclosed algorithms and enables publication of precise, reproducible and modifiable workflows. It allows simple construction of processing pipelines and should enable biologists to perform challenging analyses of complex processes. We demonstrate its performance in a study of integrin clustering in response to selected inhibitors.}, issn = {1548-7105}, doi = {10.1038/nmeth.2047}, author = {Kankaanp{\"a}{\"a}, Pasi and Paavolainen, Lassi and Tiitta, Silja and Karjalainen, Mikko and P{\"a}iv{\"a}rinne, Joacim and Nieminen, Jonna and Marjom{\"a}ki, Varpu and Heino, Jyrki and White, Daniel J} } @article {776, title = {Building a robust, scalable and standards-driven infrastructure for secondary use of EHR data: The SHARPn project.}, journal = {J Biomed Inform}, year = {2012}, month = {2012 Feb 4}, abstract = {The Strategic Health IT Advanced Research Projects (SHARP) Program, established by the Office of the National Coordinator for Health Information Technology in 2010 supports research findings that remove barriers for increased adoption of health IT. The improvements envisioned by the SHARP Area 4 Consortium (SHARPn) will enable the use of the electronic health record (EHR) for secondary purposes, such as care process and outcomes improvement, biomedical research and epidemiologic monitoring of the nation{\textquoteright}s health. One of the primary informatics problem areas in this endeavor is the standardization of disparate health data from the nation{\textquoteright}s many health care organizations and providers. The SHARPn team is developing open source services and components to support the ubiquitous exchange, sharing and reuse or {\textquoteright}liquidity{\textquoteright} of operational clinical data stored in electronic health records. One year into the design and development of the SHARPn framework, we demonstrated end to end data flow and a prototype SHARPn platform, using thousands of patient electronic records sourced from two large healthcare organizations: Mayo Clinic and Intermountain Healthcare. The platform was deployed to (1) receive source EHR data in several formats, (2) generate structured data from EHR narrative text, and (3) normalize the EHR data using common detailed clinical models and Consolidated Health Informatics standard terminologies, which were (4) accessed by a phenotyping service using normalized data specifications. The architecture of this prototype SHARPn platform is presented. 
The EHR data throughput demonstration showed success in normalizing native EHR data, both structured and narrative, from two independent organizations and EHR systems. Based on the demonstration, observed challenges for standardization of EHR data for interoperable secondary use are discussed.}, issn = {1532-0480}, doi = {10.1016/j.jbi.2012.01.009}, author = {Rea, Susan and Pathak, Jyotishman and Savova, Guergana and Oniki, Thomas A and Westberg, Les and Beebe, Calvin E and Tao, Cui and Parker, Craig G and Haug, Peter J and Huff, Stanley M and Chute, Christopher G} } @article {816, title = {Developing open source, self-contained disease surveillance software applications for use in resource-limited settings.}, journal = {BMC Med Inform Decis Mak}, volume = {12}, year = {2012}, month = {2012}, pages = {99}, abstract = {BACKGROUND: Emerging public health threats often originate in resource-limited countries. In recognition of this fact, the World Health Organization issued revised International Health Regulations in 2005, which call for significantly increased reporting and response capabilities for all signatory nations. Electronic biosurveillance systems can improve the timeliness of public health data collection, aid in the early detection of and response to disease outbreaks, and enhance situational awareness. METHODS: As components of its Suite for Automated Global bioSurveillance (SAGES) program, The Johns Hopkins University Applied Physics Laboratory developed two open-source, electronic biosurveillance systems for use in resource-limited settings. OpenESSENCE provides web-based data entry, analysis, and reporting. ESSENCE Desktop Edition provides similar capabilities for settings without internet access. Both systems may be configured to collect data using locally available cell phone technologies. RESULTS: ESSENCE Desktop Edition has been deployed for two years in the Republic of the Philippines. Local health clinics have rapidly adopted the new technology to provide daily reporting, thus eliminating the two-to-three week data lag of the previous paper-based system. CONCLUSIONS: OpenESSENCE and ESSENCE Desktop Edition are two open-source software products with the capability of significantly improving disease surveillance in a wide range of resource-limited settings. These products, and other emerging surveillance technologies, can assist resource-limited countries in complying with the revised International Health Regulations.}, issn = {1472-6947}, doi = {10.1186/1472-6947-12-99}, author = {Campbell, Timothy C and Hodanics, Charles J and Babin, Steven M and Poku, Adjoa M and Wojcik, Richard A and Skora, Joseph F and Coberly, Jacqueline S and Mistry, Zarna S and Lewis, Sheri H} } @inbook {792, title = {Development of a Laparoscopic Surgical Training System with Simulation Open Framework Architecture (SOFA)}, booktitle = {Computer Aided Surgery}, series = {Proceedings in Information and Communications Technology}, volume = {3}, year = {2012}, pages = {83-91}, publisher = {Springer Japan}, organization = {Springer Japan}, abstract = {With a number of advantages over traditional laparotomy, laparoscopic surgery is a successful minimally invasive surgical procedure. However, laparoscopy demands high surgical skill. For efficient and safe training, virtual surgery simulation systems have been developed recently. In this paper, we describe the development of a laparoscopic surgery training system using Simulation Open Framework Architecture (SOFA). 
The simulation software was integrated with a two-hand haptic device specially developed for laparoscopic surgical training. As an example, we focused on the simulation of gallbladder removal surgery using laparoscopic instruments. Gallbladder removal was successfully simulated by the proposed methods. We expect this training system to be similarly successful in simulating a number of other surgical procedures.}, isbn = {978-4-431-54094-6}, url = {http://dx.doi.org/10.1007/978-4-431-54094-6_10}, author = {Kim, Youngjun and Kim, Kyunghwan and Roy, Fr{\'e}d{\'e}rick and Park, Sehyung}, editor = {Dohi, Takeyoshi and Liao, Hongen} } @article {799, title = {A framework for mapping, visualisation and automatic model creation of signal-transduction networks.}, journal = {Mol Syst Biol}, volume = {8}, year = {2012}, month = {2012}, pages = {578}, abstract = {Intracellular signalling systems are highly complex. This complexity makes handling, analysis and visualisation of available knowledge a major challenge in current signalling research. Here, we present a novel framework for mapping signal-transduction networks that avoids the combinatorial explosion by breaking down the network into reaction and contingency information. It provides two new visualisation methods and automatic export to mathematical models. We use this framework to compile the presently most comprehensive map of the yeast MAP kinase network. Our method improves previous strategies by combining (I) more concise mapping adapted to empirical data, (II) individual referencing for each piece of information, (III) visualisation without simplifications or added uncertainty, (IV) automatic visualisation in multiple formats, (V) automatic export to mathematical models and (VI) compatibility with established formats. The framework is supported by an open source software tool that facilitates integration of the three levels of network analysis: definition, visualisation and mathematical modelling. The framework is species-independent, and we expect that it will have wider impact in signalling research on any system.}, issn = {1744-4292}, doi = {10.1038/msb.2012.12}, author = {Tiger, Carl-Fredrik and Krause, Falko and Cedersund, Gunnar and Palm{\'e}r, Robert and Klipp, Edda and Hohmann, Stefan and Kitano, Hiroaki and Krantz, Marcus} } @article {815, title = {Free and open-source software application for the evaluation of coronary computed tomography angiography images.}, journal = {Arq Bras Cardiol}, year = {2012}, month = {2012 Oct 2}, abstract = {BACKGROUND: The standardization of images used in medicine was performed in 1993 with the DICOM (Digital Imaging and Communications in Medicine) standard. Many tests use this standard, and it is increasingly necessary to design software applications capable of handling this type of image; however, these software applications are not usually free and open-source, and this fact hinders their adaptation to the most diverse interests. OBJECTIVE: To develop and validate a free and open-source software application capable of handling DICOM coronary computed tomography angiography images. METHODS: We developed and tested the ImageLab software in the evaluation of 100 tests randomly selected from a database. We carried out 600 tests divided between two observers using ImageLab and another software application sold with Philips Brilliance computed tomography scanners in the evaluation of coronary lesions and plaques around the left main coronary artery (LMCA) and the anterior descending artery (ADA). 
To evaluate intraobserver, interobserver, and intersoftware agreement, we used simple agreement and kappa statistics. RESULTS: The agreements observed between software applications were generally classified as substantial or almost perfect in most comparisons. CONCLUSION: The ImageLab software agreed with the Philips software in the evaluation of coronary computed tomography angiography tests, especially in patients without lesions, with lesions < 50\% in the LMCA and < 70\% in the ADA. The agreement for lesions > 70\% in the ADA was lower, but this is also observed when the anatomical reference standard is used.}, issn = {1678-4170}, author = {Hadlich, Marcelo Souza and Oliveira, Gl{\'a}ucia Maria Moraes and Feij{\'o}o, Ra{\'u}l A and Azevedo, Clerio F and Tura, Bernardo Rangel and Ziemer, Paulo Gustavo Portela and Blanco, Pablo Javier and Pina, Gustavo and Meira, M{\'a}rcio and Souza E Silva, Nelson Albuquerque de} } @article {793, title = {Integrating clinical practice and public health surveillance using electronic medical record systems.}, journal = {Am J Prev Med}, volume = {42}, year = {2012}, month = {2012 Jun}, pages = {S154-62}, abstract = {Electronic medical record (EMR) systems have rich potential to improve integration between primary care and the public health system at the point of care. EMRs make it possible for clinicians to contribute timely, clinically detailed surveillance data to public health practitioners without changing their existing workflows or incurring extra work. New surveillance systems can extract raw data from providers{\textquoteright} EMRs, analyze them for conditions of public health interest, and automatically communicate results to health departments. The current paper describes a model EMR-based public health surveillance platform called Electronic Medical Record Support for Public Health (ESP). The ESP platform provides live, automated surveillance for notifiable diseases, influenza-like illness, and diabetes prevalence, care, and complications. Results are automatically transmitted to state health departments.}, issn = {1873-2607}, doi = {10.1016/j.amepre.2012.04.005}, author = {Klompas, Michael and McVetta, Jason and Lazarus, Ross and Eggleston, Emma and Haney, Gillian and Kruskal, Benjamin A and Yih, W Katherine and Daly, Patricia and Oppedisano, Paul and Beagan, Brianne and Lee, Michael and Kirby, Chaim and Heisey-Grove, Dawn and DeMaria, Alfred and Platt, Richard} } @article {908, title = {The National Alliance for Medical Image Computing, a roadmap initiative to build a free and open source software infrastructure for translational research in medical image analysis}, journal = {Journal of the American Medical Informatics Association}, volume = {19}, year = {2012}, pages = {176{\textendash}180}, abstract = {The National Alliance for Medical Image Computing (NA-MIC) is a multi-institutional, interdisciplinary community of researchers who share the recognition that modern health care demands improved technologies to ease suffering and prolong productive life. Organized under the National Centers for Biomedical Computing 7 years ago, the mission of NA-MIC is to implement a robust and flexible open-source infrastructure for developing and applying advanced imaging technologies across a range of important biomedical research disciplines. A measure of its success, NA-MIC is now applying this technology to diseases that have immense impact on the duration and quality of life: cancer, heart disease, trauma, and degenerative genetic diseases. 
The targets of this technology range from group comparisons to subject-specific analysis.}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84857161947\&partnerID=40\&md5=c7d7cbab9304114a219812cb7c5091ee}, author = {Kapur, T and Pieper, S and Whitaker, R and Aylward, S and Jakab, M and Schroeder, W and Kikinis, R} } @inbook {789, title = {SOFA: A Multi-Model Framework for Interactive Physical Simulation}, booktitle = {Soft Tissue Biomechanical Modeling for Computer Assisted Surgery}, series = {Studies in Mechanobiology, Tissue Engineering and Biomaterials}, volume = {11}, year = {2012}, pages = {283-321}, publisher = {Springer Berlin Heidelberg}, organization = {Springer Berlin Heidelberg}, abstract = {Simulation Open Framework Architecture (SOFA) is an open-source C++ library primarily targeted at interactive computational medical simulation. SOFA facilitates collaborations between specialists from various domains, by decomposing complex simulators into components designed independently and organized in a scenegraph data structure. Each component encapsulates one of the aspects of a simulation, such as the degrees of freedom, the forces and constraints, the differential equations, the main loop algorithms, the linear solvers, the collision detection algorithms or the interaction devices. The simulated objects can be represented using several models, each of them optimized for a different task such as the computation of internal forces, collision detection, haptics or visual display. These models are synchronized during the simulation using a mapping mechanism. CPU and GPU implementations can be transparently combined to exploit the computational power of modern hardware architectures. Thanks to this flexible yet efficient architecture, SOFA can be used as a test-bed to compare models and algorithms, or as a basis for the development of complex, high-performance simulators.}, isbn = {978-3-642-29013-8}, doi = {10.1007/8415_2012_125}, url = {http://dx.doi.org/10.1007/8415_2012_125}, author = {Faure, Fran{\c c}ois and Duriez, Christian and Delingette, Herv{\'e} and Allard, J{\'e}r{\'e}mie and Gilles, Benjamin and Marchesseau, St{\'e}phanie and Talbot, Hugo and Courtecuisse, Hadrien and Bousquet, Guillaume and Peterlik, Igor and Cotin, St{\'e}phane}, editor = {Payan, Yohan} } @article {826, title = {Ten simple rules for the open development of scientific software.}, journal = {PLoS Comput Biol}, volume = {8}, year = {2012}, month = {2012 Dec}, pages = {e1002802}, issn = {1553-7358}, doi = {10.1371/journal.pcbi.1002802}, author = {Prli{\'c}, Andreas and Procter, James B} } @inbook {786, title = {VURTIGO: Visualization Platform for Real-Time, MRI-Guided Cardiac Electroanatomic Mapping}, booktitle = {Statistical Atlases and Computational Models of the Heart. Imaging and Modelling Challenges}, series = {Lecture Notes in Computer Science}, volume = {7085}, year = {2012}, pages = {244-253}, publisher = {Springer Berlin / Heidelberg}, organization = {Springer Berlin / Heidelberg}, abstract = {Guidance of electrophysiological (EP) procedures by magnetic resonance imaging (MRI) has significant advantages over x-ray fluoroscopy. Display of electroanatomic mapping (EAM) during an intervention fused with a prior MR volume and DE-MRI derived tissue classification should improve the accuracy of cardiac resynchronization therapy (CRT) for ventricular arrhythmias. 
Improved accuracy in the spatial localization of recorded EP points will produce an EAM to constrain and customize patient-specific cardiac electroanatomic models being developed for understanding the patterns of arrhythmogenic slow conduction zones causing reentry circuits and treatment planning. The Vurtigo software presented here is a four dimensional (3D+time) real-time visualization application for guiding interventions capable of displaying prior volumes, real-time MRI scan planes, EAM (voltage or activation times), segmented models, and tracked catheters. This paper will describe the architecture and features of Vurtigo followed by the application example of guiding percutaneous cardiac electroanatomic mapping in porcine models.}, isbn = {978-3-642-28325-3}, url = {http://dx.doi.org/10.1007/978-3-642-28326-0_25}, author = {Radau, Perry and Pintilie, Stefan and Flor, Roey and Biswas, Labonny and Oduneye, Samuel and Ramanan, Venkat and Anderson, Kevan and Wright, Graham}, editor = {Camara, Oscar and Konukoglu, Ender and Pop, Mihaela and Rhode, Kawal and Sermesant, Maxime and Young, Alistair} } @article {575, title = {Brainstorm: A User-Friendly Application for MEG/EEG Analysis}, journal = {Computational Intelligence and Neuroscience}, volume = {2011}, year = {2011}, month = {01/2011}, abstract = {Brainstorm is a collaborative open-source application dedicated to magnetoencephalography (MEG) and electroencephalography (EEG) data visualization and processing, with an emphasis on cortical source estimation techniques and their integration with anatomical magnetic resonance imaging (MRI) data. The primary objective of the software is to connect MEG/EEG neuroscience investigators with both the best-established and cutting-edge methods through a simple and intuitive graphical user interface (GUI).}, doi = {10.1155/2011/879716}, url = {http://www.hindawi.com/journals/cin/2011/879716/}, author = {Fran{\c c}ois Tadel and Sylvain Baillet and John C. Mosher and Dimitrios Pantazis} } @article {597, title = {Forward field computation with OpenMEEG.}, journal = {Computational intelligence and neuroscience}, volume = {2011}, year = {2011}, month = {2011}, pages = {923703}, abstract = {To recover the sources giving rise to electro- and magnetoencephalography in individual measurements, realistic physiological modeling is required, and accurate numerical solutions must be computed. We present OpenMEEG, which solves the electromagnetic forward problem in the quasistatic regime, for head models with piecewise constant conductivity. The core of OpenMEEG consists of the symmetric Boundary Element Method, which is based on an extended Green Representation theorem. OpenMEEG is able to provide lead fields for four different electromagnetic forward problems: Electroencephalography (EEG), Magnetoencephalography (MEG), Electrical Impedance Tomography (EIT), and intracranial electric potentials (IPs). OpenMEEG is open source and multiplatform. 
It can be used from Python and Matlab in conjunction with toolboxes that solve the inverse problem; its integration within FieldTrip has been operational since release 2.0.}, issn = {1687-5273}, author = {Gramfort, Alexandre and Papadopoulo, Th{\'e}odore and Olivi, Emmanuel and Clerc, Maureen} } @article {594, title = {Implementation of the Zambia electronic perinatal record system for comprehensive prenatal and delivery care.}, journal = {International journal of gynaecology and obstetrics: the official organ of the International Federation of Gynaecology and Obstetrics}, volume = {113}, year = {2011}, month = {2011 May}, pages = {131-6}, abstract = {OBJECTIVE: To characterize prenatal and delivery care in an urban African setting. METHODS: The Zambia Electronic Perinatal Record System (ZEPRS) was implemented to record demographic characteristics, past medical and obstetric history, prenatal care, and delivery and newborn care for pregnant women across 25 facilities in the Lusaka public health sector. RESULTS: From June 1, 2007, to January 31, 2010, 115552 pregnant women had prenatal and delivery information recorded in ZEPRS. Median gestational age at first prenatal visit was 23 weeks (interquartile range [IQR] 19-26). Syphilis screening was documented in 95663 (83\%) pregnancies: 2449 (2.6\%) women tested positive, of whom 1589 (64.9\%) were treated appropriately. 111108 (96\%) women agreed to HIV testing, of whom 22\% were diagnosed with HIV. Overall, 112813 (98\%) of recorded pregnancies resulted in a live birth, and 2739 (2\%) in a stillbirth. The median gestational age at delivery was 38 weeks (IQR 35-40); the median birth weight of newborns was 3000 g (IQR 2700-3300 g). CONCLUSION: The results demonstrate the feasibility of using a comprehensive electronic medical record in an urban African setting, and highlight its important role in ongoing efforts to improve clinical care.}, issn = {1879-3479}, author = {Chi, Benjamin H and Vwalika, Bellington and Killam, William P and Wamalume, Chibesa and Giganti, Mark J and Mbewe, Reuben and Stringer, Elizabeth M and Chintu, Namwinga T and Putta, Nande B and Liu, Katherine C and Chibwesha, Carla J and Rouse, Dwight J and Stringer, Jeffrey S A} } @article {538, title = {Integration of 3D anatomical data obtained by CT imaging and 3D optical scanning for Computer Aided Implant Surgery.}, journal = {BMC medical imaging}, volume = {11}, year = {2011}, month = {2011 Feb 21}, pages = {5}, abstract = {BACKGROUND: A precise placement of dental implants is a crucial step to optimize both prosthetic aspects and functional constraints. In this context, the use of virtual guiding systems has been recognized as a fundamental tool to control the ideal implant position. In particular, complex periodontal surgeries can be performed using preoperative planning based on CT data. The critical point of the procedure lies in the lack of accuracy in transferring CT planning information to the surgical field through custom-made stereo-lithographic surgical guides. METHODS: In this work, a novel methodology is proposed for monitoring loss of accuracy in transferring CT dental information into the periodontal surgical field. The methodology is based on integrating 3D data of anatomical (impression and cast) and preoperative (radiographic template) models, obtained by both CT and optical scanning processes. 
RESULTS: A clinical case, relating to a patient with a fully edentulous jaw, has been used as a test case to assess the accuracy of the various steps involved in manufacturing surgical guides. In particular, a surgical guide has been designed to place implants in the bone structure of the patient. The analysis of the results has allowed the clinician to monitor the errors occurring at each step of manufacturing the physical templates. CONCLUSIONS: The use of an optical scanner, which has a higher resolution and accuracy than CT scanning, has been demonstrated to be a valid support for controlling the precision of the various physical models adopted and for pointing out possible error sources. A case study regarding a fully edentulous patient has confirmed the feasibility of the proposed methodology.}, issn = {1471-2342}, author = {Frisardi, Gianni and Chessa, Giacomo and Barone, Sandro and Paoli, Alessandro and Razionale, Armando and Frisardi, Flavio} } @article {573, title = {mantisGRID: A Grid Platform for DICOM Medical Images Management in Colombia and Latin America.}, journal = {Journal of digital imaging : the official journal of the Society for Computer Applications in Radiology}, volume = {24}, year = {2011}, month = {2011 Apr}, pages = {271-83}, abstract = {This paper presents the mantisGRID project, an interinstitutional initiative from Colombian medical and academic centers aiming to provide medical grid services for Colombia and Latin America. The mantisGRID is a grid platform, based on open source grid infrastructure, that provides the necessary services to access and exchange medical images and associated information following digital imaging and communications in medicine (DICOM) and health level 7 standards. The paper focuses first on the data abstraction architecture, which is achieved via Open Grid Services Architecture Data Access and Integration (OGSA-DAI) services and supported by the Globus Toolkit. The grid currently uses a 30-Mb bandwidth of the Colombian High Technology Academic Network, RENATA, connected to Internet 2. It also includes a discussion on the relational database created to handle the DICOM objects that were represented using Extensible Markup Language Schema documents, as well as other features implemented such as data security, user authentication, and patient confidentiality. 
Grid performance was tested using the three current operative nodes and the results demonstrated comparable query times between the mantisGRID (OGSA-DAI) and Distributed mySQL databases, especially for a large number of records.}, issn = {1618-727X}, author = {Garcia Ruiz, Manuel and Garcia Chaves, Alvin and Ruiz Iba{\~n}ez, Carlos and Gutierrez Mazo, Jorge Mario and Ramirez Giraldo, Juan Carlos and Pelaez Echavarria, Alejandro and Valencia Diaz, Edison and Pelaez Restrepo, Gustavo and Montoya Munera, Edwin Nelson and Garcia Loaiza, Bernardo and Gomez Gonzalez, Sebastian} } @inbook {springerlink:10.1007/978-1-4419-8204-9_9, title = {Medical Image Registration}, booktitle = {Multi Modality State-of-the-Art Medical Image Segmentation and Registration Methodologies}, year = {2011}, note = {10.1007/978-1-4419-8204-9_9}, pages = {227-245}, publisher = {Springer New York}, organization = {Springer New York}, abstract = {In this chapter, we cover the necessary background information required to understand medical image registration, the basic tools required to implement registration algorithms, and demonstrate a complete application for various types of registration between different modalities using freely available and maintained software.}, isbn = {978-1-4419-8204-9}, url = {http://dx.doi.org/10.1007/978-1-4419-8204-9_9}, author = {Aladl, Usaf E. and Peters, Terry}, editor = {El-Baz, Ayman S. and Acharya U, Rajendra and Laine, Andrew F. and Suri, Jasjit S.} } @article {1155, title = {The {SHARPn} project on secondary use of {Electronic} {Medical} {Record} data: progress, plans, and possibilities}, journal = {AMIA ... Annual Symposium proceedings. AMIA Symposium}, volume = {2011}, year = {2011}, pages = {248{\textendash}256}, abstract = {SHARPn is a collaboration among 16 academic and industry partners committed to the production and distribution of high-quality software artifacts that support the secondary use of EMR data. Areas of emphasis are data normalization, natural language processing, high-throughput phenotyping, and data quality metrics. Our work avails the industrial scalability afforded by the Unstructured Information Management Architecture (UIMA) from IBM Watson Research labs, the same framework which underpins the Watson Jeopardy demonstration. This descriptive paper outlines our present work and achievements, and presages our trajectory for the remainder of the funding period. The project is one of the four Strategic Health IT Advanced Research Projects (SHARP) projects funded by the Office of the National Coordinator in 2010.}, keywords = {Algorithms, Biomedical Research, Cooperative Behavior, Data Mining, electronic health records, Natural Language Processing, Software}, issn = {1942-597X}, author = {Chute, Christopher G. and Pathak, Jyotishman and Savova, Guergana K. and Bailey, Kent R. and Schor, Marshall I. and Hart, Lacey A. and Beebe, Calvin E. and Huff, Stanley M.} } @article {529, title = {AngioLab: Integrated technology for patient-specific management of intracranial aneurysms.}, journal = {Conference proceedings : ... Annual International Conference of the IEEE Engineering in Medicine and Biology Society. IEEE Engineering in Medicine and Biology Society. Conference}, volume = {1}, year = {2010}, month = {2010}, pages = {6801-4}, abstract = {AngioLab is a software tool developed within the GIMIAS framework and is part of a more ambitious pipeline for the integrated management of cerebral aneurysms. 
AngioLab currently includes three plug-ins: angio segmentation, angio morphology, and stenting. It also supports advanced rendering techniques for the visualization of virtual angiographies. In December 2009, 23 clinicians completed an evaluation questionnaire about AngioLab. This activity took place during the 2nd European Society for Minimally Invasive Neurovascular Treatment (ESMINT) Teaching Course, held at the Universitat Pompeu Fabra, Barcelona, Spain. The Automated Morphological Analysis (angio morphology plug-in) and the Endovascular Treatment Planning (stenting plug-in) were evaluated. In general, the results provided by these tools were considered relevant and an emerging need in their clinical field.}, issn = {1557-170X}, author = {Villa-Uriol, M C and Larrabide, I and Geers, A J and Pozo, J and Bogunovic, H and Mazzeo, M and Omedas, P and Barbarito, V and Carotenuto, L and Riccobene, C and Planes, X and Martelli, Y and Frangi, A F} } @article {1013, title = {Combining vital events registration, verbal autopsy and electronic medical records in rural Ghana for improved health services delivery.}, journal = {Stud Health Technol Inform}, volume = {160}, year = {2010}, month = {2010}, pages = {416-20}, abstract = {

This paper describes the process of implementing a low-cost {\textquoteright}real-time{\textquoteright} vital registration and verbal autopsy system integrated with an electronic medical record in the Millennium Village cluster in rural Ghana. Using MGV-Net, an open source health information architecture built around the OpenMRS platform, a total of 2378 births were registered between January 2007 and June 2009. The percentage of births registered in the health facility under supervision of a skilled attendant increased substantially over the course of the project, from a median of 35\% in 2007 to 64\% in 2008 and 85\% midway through 2009. Building additional clinics to reduce the distance to a facility and using community health extension workers (CHEWs) to refer women for delivery in the clinics are possible explanations for this success in vital registration. The integration of vital registration and verbal autopsies with the MGV-Net information system makes it possible to rapidly assess effectiveness and provides important feedback to local providers and the Millennium Villages Project.

}, keywords = {Cause of Death, Database Management Systems, Delivery of Health Care, electronic health records, Ghana, Medical Record Linkage, Quality Assurance, Health Care, Quality Improvement, Registries, Rural Health Services, Vital Statistics}, issn = {0926-9630}, author = {Ohemeng-Dapaah, Seth and Pronyk, Paul and Akosa, Eric and Nemser, Bennett and Kanter, Andrew S} } @article {20517664, title = {E-health integration and interoperability based on open-source information technology.}, journal = {Wiener klinische Wochenschrift}, volume = {122 Suppl 2}, year = {2010}, month = {2010 May}, pages = {3-10}, author = {Dinevski, Dejan and Poli, Andrea and Krajnc, Ivan and Sustersic, Olga and Arh, Tanja} } @article {904, title = {Electronic data capture for registries and clinical trials in orthopaedic surgery: Open source versus commercial systems}, journal = {Clinical Orthopaedics and Related Research}, volume = {468}, year = {2010}, pages = {2664{\textendash}2671}, abstract = {Background: Collection and analysis of clinical data can help orthopaedic surgeons to practice evidence-based medicine. Spreadsheets and offline relational databases are prevalent but are not flexible, secure, or workflow-friendly, and they do not support the generation of standardized and interoperable data. Additionally, these data collection applications usually do not follow a structured and planned approach, which may result in failure to achieve the intended goal. Questions/purposes: Our purposes are (1) to provide a brief overview of EDC systems, their types, and related pros and cons as well as to describe commonly used EDC platforms and their features; and (2) to describe simple steps involved in designing a registry/clinical study in DADOS P, an open source EDC system. Where are we now?: Electronic data capture systems aimed at addressing these issues are widely being adopted at an institutional/national/international level but are lacking at an individual level. A wide array of features, relative pros and cons and different business models cause confusion and indecision among orthopaedic surgeons interested in implementing EDC systems. Where do we need to go?: To answer clinical questions and actively participate in clinical studies, orthopaedic surgeons should collect data in parallel with their clinical activities. Adopting a simple, user-friendly, and robust EDC system can facilitate the data collection process. How do we get there?: Conducting a balanced evaluation of available options and comparing them with intended goals and requirements can help orthopaedic surgeons to make an informed choice. 
}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-77957336660\&partnerID=40\&md5=dec3706be86215e9656a06f82265cb28}, author = {Shah, J and Rajgor, D and Pradhan, S and McCready, M and Zaveri, A and Pietrobon, R} } @article {1054, title = {How informatics can potentiate precompetitive open-source collaboration to jump-start drug discovery and development.}, journal = {Clin Pharmacol Ther}, volume = {87}, year = {2010}, month = {2010 May}, pages = {614-6}, keywords = {Animals, Cooperative Behavior, Drug Discovery, Drug Industry, Economic Competition, Humans, Informatics, Information Dissemination}, issn = {1532-6535}, doi = {10.1038/clpt.2010.21}, author = {Perakslis, E D and Van Dam, J and Szalma, S} } @article {10.1371/journal.pone.0014094, title = {JULIDE: A Software Tool for 3D Reconstruction and Statistical Analysis of Autoradiographic Mouse Brain Sections}, journal = {PLoS ONE}, volume = {5}, number = {11}, year = {2010}, month = {11}, pages = {e14094}, publisher = {Public Library of Science}, abstract = {

In this article we introduce JULIDE, a software toolkit developed to perform the 3D reconstruction, intensity normalization, volume standardization by 3D image registration and voxel-wise statistical analysis of autoradiographs of mouse brain sections. This software tool has been developed in the open-source ITK software framework and is freely available under a GPL license. The article presents the complete image processing chain from raw data acquisition to 3D statistical group analysis. Results of the group comparison in the context of a study on spatial learning are shown as an illustration of the data that can be obtained with this tool.
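JULIDE itself is written in C++ on top of ITK. As a rough sketch of what its volume-standardization step (intensity-based 3D registration to a reference volume) looks like, here is an illustration using SimpleITK, ITK's Python wrapping; this is an assumption for illustration, not JULIDE's own code, and the file names are placeholders:

    import SimpleITK as sitk

    # Placeholder inputs: a reconstructed section volume and a reference volume.
    fixed = sitk.ReadImage("reference_volume.nii", sitk.sitkFloat32)
    moving = sitk.ReadImage("reconstructed_sections.nii", sitk.sitkFloat32)

    reg = sitk.ImageRegistrationMethod()
    reg.SetMetricAsMeanSquares()  # simple intensity-difference metric
    reg.SetOptimizerAsRegularStepGradientDescent(
        learningRate=1.0, minStep=1e-4, numberOfIterations=200)
    reg.SetInitialTransform(sitk.TranslationTransform(fixed.GetDimension()))
    reg.SetInterpolator(sitk.sitkLinear)

    transform = reg.Execute(fixed, moving)
    resampled = sitk.Resample(moving, fixed, transform, sitk.sitkLinear, 0.0)
    sitk.WriteImage(resampled, "registered_sections.nii")

A production pipeline would typically use a more robust metric and a richer transform model; the sketch only shows where registration sits in the chain between reconstruction and voxel-wise statistics.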

}, keywords = {Animals, Autoradiography, Brain, Carbon Radioisotopes, Deoxyglucose, Image Processing, Computer-Assisted, Imaging, Three-Dimensional, Male, Maze Learning, Mice, Mice, Inbred C57BL, Reproducibility of Results, Software}, issn = {1932-6203}, doi = {10.1371/journal.pone.0014094}, url = {http://dx.doi.org/10.1371\%2Fjournal.pone.0014094}, author = {Ribes, Delphine and Parafita, Julia and Charrier, R{\'e}mi and Magara, Fulvio and Magistretti, Pierre J. and Thiran, Jean-Philippe} }
@article {19910667, title = {The medical exploration toolkit: an efficient support for visual computing in surgical planning and training.}, journal = {IEEE transactions on visualization and computer graphics}, volume = {16}, year = {2010}, month = {2010 Jan-Feb}, pages = {133-46}, abstract = {Application development is often guided by the usage of software libraries and toolkits. For medical applications, the toolkits currently available focus on image analysis and volume rendering. Advanced interactive visualizations and user interface issues are not adequately supported. Hence, we present a toolkit for application development in the field of medical intervention planning, training, and presentation--the MEDICALEXPLORATIONTOOLKIT (METK). The METK is based on the rapid prototyping platform MeVisLab and offers a large variety of facilities for an easy and efficient application development process. We present dedicated techniques for advanced medical visualizations, exploration, standardized documentation, and interface widgets for common tasks. These include, e.g., advanced animation facilities, viewpoint selection, several illustrative rendering techniques, and new techniques for object selection in 3D surface models. No extended programming skills are needed for application building, since a graphical programming approach can be used. The toolkit is freely available and well documented to facilitate its use and extension.}, author = {M{\"u}hler, Konrad and Tietjen, Christian and Ritter, Felix and Preim, Bernhard} } @article {596, title = {OpenMEEG: opensource software for quasistatic bioelectromagnetics.}, journal = {Biomedical engineering online}, volume = {9}, year = {2010}, month = {2010}, pages = {45}, abstract = {Interpreting and controlling bioelectromagnetic phenomena require realistic physiological models and accurate numerical solvers. A semi-realistic model often used in practice is the piecewise constant conductivity model, for which only the interfaces have to be meshed. This simplified model makes it possible to use Boundary Element Methods. Unfortunately, most Boundary Element solutions are confronted with accuracy issues when the conductivity ratio between neighboring tissues is high, as for instance the scalp/skull conductivity ratio in electro-encephalography. To overcome this difficulty, we proposed a new method called the symmetric BEM, which is implemented in the OpenMEEG software. 
The aim of this paper is to present OpenMEEG, both from the theoretical and the practical point of view, and to compare its performance with that of competing software packages.}, keywords = {Benchmarking, Computers, Electric Impedance, Electricity, Electroencephalography, Electromagnetic Phenomena, Licensure, Magnetics, Magnetoencephalography, Models, Theoretical, Quality Control, Software, Time Factors, Tomography}, issn = {1475-925X}, author = {Gramfort, Alexandre and Papadopoulo, Th{\'e}odore and Olivi, Emmanuel and Clerc, Maureen} } @conference {panchal:3245, title = {SU-GG-T-260: Dicompyler: An Open Source Radiation Therapy Research Platform with a Plugin Architecture}, volume = {37}, number = {6}, year = {2010}, pages = {3245-3245}, publisher = {AAPM}, organization = {AAPM}, doi = {10.1118/1.3468652}, url = {http://link.aip.org/link/?MPH/37/3245/2}, author = {A Panchal and R Keyes} } @article {523, title = {Barriers to open source software adoption in Quebec{\textquoteright}s health care organizations.}, journal = {Journal of medical systems}, volume = {33}, year = {2009}, month = {2009 Feb}, pages = {1-7}, abstract = {We conducted in-depth interviews with 15 CIOs to identify the principal impediments to adoption of open source software in the Quebec health sector. We found that key factors for not adopting an open source solution were closely linked to the orientations of ministry-level policy makers and a seeming lack of information on the part of operational-level IT managers concerning commercially oriented open source providers. We use the case of recent changes in the structure of Quebec{\textquoteright}s health care organizations and a change in the commercial policies of a key vendor to illustrate our conclusions regarding barriers to adoption of open source products.}, keywords = {Attitude of Health Personnel, Delivery of Health Care, Health Facility Administrators, Health Policy, Humans, Medical Informatics, Organizational Innovation, Quebec, Software, Systems Integration, Technology Transfer}, issn = {0148-5598}, author = {Par{\'e}, Guy and Wybo, Michael D and Delannoy, Charles} } @article {499, title = {Bayesian analysis of neuroimaging data in FSL.}, journal = {NeuroImage}, volume = {45}, year = {2009}, month = {2009 Mar}, pages = {S173-86}, abstract = {Typically in neuroimaging we are looking to extract some pertinent information from imperfect, noisy images of the brain. This might be the inference of percent changes in blood flow in perfusion FMRI data, segmentation of subcortical structures from structural MRI, or inference of the probability of an anatomical connection between an area of cortex and a subthalamic nucleus using diffusion MRI. In this article we will describe how Bayesian techniques have made a significant impact in tackling problems such as these, particularly with regard to the analysis tools in the FMRIB Software Library (FSL). We shall see how Bayes provides a framework within which we can attempt to infer on models of neuroimaging data, while allowing us to incorporate our prior belief about the brain and the neuroimaging equipment in the form of biophysically informed or regularising priors. It allows us to extract probabilistic information from the data, and to probabilistically combine information from multiple modalities. Bayes can also be used not only to compare and select between models of different complexity, but also to infer on data using committees of models. 
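For reference, the Bayesian machinery sketched here rests on Bayes' rule: the posterior over model parameters combines the likelihood of the data with a prior, which is where the biophysically informed or regularising priors mentioned above enter. For parameters \theta and data y:

    p(\theta \mid y) = \frac{p(y \mid \theta)\, p(\theta)}{\int p(y \mid \theta')\, p(\theta')\, \mathrm{d}\theta'}

Model comparison proceeds analogously one level up, via the marginal likelihood (evidence) p(y \mid M) of each candidate model M; the denominator that is a nuisance for parameter inference becomes the quantity of interest.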
Finally, we mention some analysis scenarios where Bayesian methods are impractical, and briefly discuss some practical approaches that we have taken in these cases.}, keywords = {Bayes Theorem, Brain, Diffusion Magnetic Resonance Imaging, Humans, Image Interpretation, Computer-Assisted, Software}, issn = {1095-9572}, author = {Woolrich, Mark W and Jbabdi, Saad and Patenaude, Brian and Chappell, Michael and Makni, Salima and Behrens, Timothy and Beckmann, Christian and Jenkinson, Mark and Smith, Stephen M} } @article {18952940, title = {Electronic Support for Public Health: validated case finding and reporting for notifiable diseases using electronic medical data.}, journal = {Journal of the American Medical Informatics Association : JAMIA}, volume = {16}, year = {2009}, month = {2009 Jan-Feb}, pages = {18-24}, abstract = {Health care providers are legally obliged to report cases of specified diseases to public health authorities, but existing manual, provider-initiated reporting systems generally result in incomplete, error-prone, and tardy information flow. Automated laboratory-based reports are more likely accurate and timely, but lack clinical information and treatment details. Here, we describe the Electronic Support for Public Health (ESP) application, a robust, automated, secure, portable public health detection and messaging system for cases of notifiable diseases. The ESP application applies disease-specific logic to any complete source of electronic medical data in a fully automated process, and supports an optional case management workflow system for case notification control. All relevant clinical, laboratory and demographic details are securely transferred to the local health authority as an HL7 message. The ESP application has operated continuously in production mode since January 2007, applying rigorously validated case identification logic to ambulatory EMR data from more than 600,000 patients. Source code for this highly interoperable application is freely available under an approved open-source license at http://esphealth.org.}, author = {Lazarus, Ross and Klompas, Michael and Campion, Francis X and McNabb, Scott J N and Hou, Xuanlin and Daniel, James and Haney, Gillian and DeMaria, Alfred and Lenert, Leslie and Platt, Richard} } @inbook {springerlink:10.1007/978-3-642-01932-6_45, title = {GIMIAS: An Open Source Framework for Efficient Development of Research Tools and Clinical Prototypes}, booktitle = {Functional Imaging and Modeling of the Heart}, series = {Lecture Notes in Computer Science}, volume = {5528}, year = {2009}, note = {10.1007/978-3-642-01932-6_45}, pages = {417-426}, publisher = {Springer Berlin / Heidelberg}, organization = {Springer Berlin / Heidelberg}, abstract = {GIMIAS is a workflow-oriented environment for addressing advanced biomedical image computing and building personalized computational models, which is extensible through the development of application-specific plug-ins. In addition, GIMIAS provides an open source framework for efficient development of research and clinical software prototypes integrating contributions from the Virtual Physiological Human community while allowing business-friendly technology transfer and commercial product development. This framework has been fully developed in ANSI-C++ on top of well-known open source libraries like VTK, ITK and wxWidgets among others. 
Based on GIMIAS, this paper presents a workflow for medical image analysis and simulation of the heart.}, url = {http://dx.doi.org/10.1007/978-3-642-01932-6_45}, author = {Larrabide, Ignacio and Omedas, Pedro and Martelli, Yves and Planes, Xavier and Nieber, Maarten and Moya, Juan and Butakoff, Constantine and Sebasti{\'a}n, Rafael and Camara, Oscar and De Craene, Mathieu and Bijnens, Bart and Frangi, Alejandro}, editor = {Ayache, Nicholas and Delingette, Herv{\'e} and Sermesant, Maxime} } @article {20044607, title = {Image analysis of breast cancer immunohistochemistry-stained sections using ImageJ: an RGB-based model.}, journal = {Anticancer research}, volume = {29}, year = {2009}, month = {2009 Dec}, pages = {4995-8}, abstract = {BACKGROUND: Image analysis of tissue sections using RGB image profiling is a modern, accepted technique. MATERIALS AND METHODS: A new method of RGB analysis using the freeware ImageJ is presented, which can be applied to sections with either nuclear or cytoplasmic staining. The step-by-step process is presented and the method is tested using breast cancer specimens immunostained for CK-19 and estrogen receptors. RESULTS: This image analysis easily discriminates CK-19 and estrogen receptor positivity in prepared breast cancer specimens. The method is easy to perform, without the need for previous image transformations. CONCLUSION: Compared to previous methods, this method proved more accurate in estimating the actual colours that an observer recognizes as positive after immunostaining. Further studies are needed to evaluate whether this method is efficient enough to be applied in clinical practice.}, author = {Vrekoussis, T and Chaniotis, V and Navrozoglou, I and Dousias, V and Pavlakis, K and Stathopoulos, E N and Zoras, O} } @article {19184561, title = {PyMVPA: A python toolbox for multivariate pattern analysis of fMRI data.}, journal = {Neuroinformatics}, volume = {7}, year = {2009}, month = {2009 Spring}, pages = {37-53}, abstract = {Decoding patterns of neural activity onto cognitive states is one of the central goals of functional brain imaging. Standard univariate fMRI analysis methods, which correlate cognitive and perceptual function with the blood oxygenation-level dependent (BOLD) signal, have proven successful in identifying anatomical regions based on signal increases during cognitive and perceptual tasks. Recently, researchers have begun to explore new multivariate techniques that have proven to be more flexible, more reliable, and more sensitive than standard univariate analysis. Drawing on the field of statistical learning theory, these new classifier-based analysis techniques possess explanatory power that could provide new insights into the functional properties of the brain. However, unlike the wealth of software packages for univariate analyses, there are few packages that facilitate multivariate pattern classification analyses of fMRI data. Here we introduce a Python-based, cross-platform, and open-source software toolbox, called PyMVPA, for the application of classifier-based analysis techniques to fMRI datasets. PyMVPA makes use of Python{\textquoteright}s ability to access libraries written in a large variety of programming languages and computing environments to interface with the wealth of existing machine learning packages. 
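At its core, the classifier-based analysis that PyMVPA supports is cross-validated prediction of an experimental condition from multi-voxel patterns. A self-contained sketch using scikit-learn on synthetic data (deliberately not PyMVPA's own API; the sizes and the injected effect are invented):

    import numpy as np
    from sklearn.svm import LinearSVC
    from sklearn.model_selection import cross_val_score

    rng = np.random.default_rng(0)
    n_trials, n_voxels = 80, 500                # synthetic: 80 trials, 500-voxel ROI
    X = rng.standard_normal((n_trials, n_voxels))
    y = np.repeat([0, 1], n_trials // 2)        # two experimental conditions
    X[y == 1, :10] += 0.8                       # inject a weak multivariate signal

    scores = cross_val_score(LinearSVC(dual=False), X, y, cv=8)
    print(f"mean cross-validated accuracy: {scores.mean():.2f}")

PyMVPA wraps this kind of cross-validation loop behind a unified dataset and classifier interface, so the same analysis can be pointed at fMRI, EEG, or other modalities.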
We present the framework in this paper and provide illustrative examples on its usage, features, and programmability.}, author = {Hanke, Michael and Halchenko, Yaroslav O and Sederberg, Per B and Hanson, Stephen Jos{\'e} and Haxby, James V and Pollmann, Stefan} } @article {19212459, title = {PyMVPA: A Unifying Approach to the Analysis of Neuroscientific Data.}, journal = {Frontiers in neuroinformatics}, volume = {3}, year = {2009}, month = {2009}, pages = {3}, abstract = {The Python programming language is steadily increasing in popularity as the language of choice for scientific computing. The ability of this scripting environment to access a huge code base in various languages, combined with its syntactical simplicity, make it the ideal tool for implementing and sharing ideas among scientists from numerous fields and with heterogeneous methodological backgrounds. The recent rise of reciprocal interest between the machine learning (ML) and neuroscience communities is an example of the desire for an inter-disciplinary transfer of computational methods that can benefit from a Python-based framework. For many years, a large fraction of both research communities have addressed, almost independently, very high-dimensional problems with almost completely non-overlapping methods. However, a number of recently published studies that applied ML methods to neuroscience research questions attracted a lot of attention from researchers from both fields, as well as the general public, and showed that this approach can provide novel and fruitful insights into the functioning of the brain. In this article we show how PyMVPA, a specialized Python framework for machine learning based data analysis, can help to facilitate this inter-disciplinary technology transfer by providing a single interface to a wide array of machine learning libraries and neural data-processing methods. We demonstrate the general applicability and power of PyMVPA via analyses of a number of neural data modalities, including fMRI, EEG, MEG, and extracellular recordings.}, author = {Hanke, Michael and Halchenko, Yaroslav O and Sederberg, Per B and Olivetti, Emanuele and Fr{\"u}nd, Ingo and Rieger, Jochem W and Herrmann, Christoph S and Haxby, James V and Hanson, Stephen Jos{\'e} and Pollmann, Stefan} } @article {444, title = {Visualization Software for Real-time, Image-guided Therapeutics in Cardiovascular Interventions}, year = {2009}, publisher = {CCSd/HAL : e-articles server (based on gBUS) [http://hal.ccsd.cnrs.fr/oai/oai.php] (France)}, abstract = {This paper introduces RtViewer, a four-dimensional (3D + time) real-time visualization software for guiding cardiovascular interventions that is open source and freely available. RtViewer was designed to be part of a pipeline that can connect it to a magnetic resonance imaging (MRI) scanner, actively tracked catheters, and navigational devices. The architecture and features of RtViewer will be described with examples of guiding percutaneous cardiovascular interventions. 
The paper concludes with a brief description of the work in progress on the next generation of this platform, named Vurtigo.}, keywords = {Computer Science/Medical Imaging}, url = {http://hal.archives-ouvertes.fr/inria-00417831/en/}, author = {Pintilie, Stefan and Biswas, Labonny and Anderson, Kevan and Dick, Sandy and Wright, Graham and Radau, Perry} } @article {18612462, title = {Automated identification of acute hepatitis B using electronic medical record data to facilitate public health surveillance.}, journal = {PloS one}, volume = {3}, year = {2008}, month = {2008}, pages = {e2626}, abstract = {BACKGROUND: Automatic identification of notifiable diseases from electronic medical records can potentially improve the timeliness and completeness of public health surveillance. We describe the development and implementation of an algorithm for prospective surveillance of patients with acute hepatitis B using electronic medical record data. METHODS: Initial algorithms were created by adapting Centers for Disease Control and Prevention diagnostic criteria for acute hepatitis B into electronic terms. The algorithms were tested by applying them to ambulatory electronic medical record data spanning 1990 to May 2006. A physician reviewer classified each case identified as acute or chronic infection. Additional criteria were added to algorithms in serial fashion to improve accuracy. The best algorithm was validated by applying it to prospective electronic medical record data from June 2006 through April 2008. Completeness of case capture was assessed by comparison with state health department records. FINDINGS: A final algorithm including a positive hepatitis B specific test, elevated transaminases and bilirubin, absence of prior positive hepatitis B tests, and absence of an ICD9 code for chronic hepatitis B identified 112/113 patients with acute hepatitis B (sensitivity 97.4\%, 95\% confidence interval 94-100\%; specificity 93.8\%, 95\% confidence interval 87-100\%). Application of this algorithm to prospective electronic medical record data identified 8 cases without false positives. These included 4 patients that had not been reported to the health department. There were no known cases of acute hepatitis B missed by the algorithm. CONCLUSIONS: An algorithm using codified electronic medical record data can reliably detect acute hepatitis B. The completeness of public health surveillance may be improved by automatically identifying notifiable diseases from electronic medical record data.}, author = {Klompas, Michael and Haney, Gillian and Church, Daniel and Lazarus, Ross and Hou, Xuanlin and Platt, Richard} } @article {18096909, title = {caGrid 1.0: an enterprise Grid infrastructure for biomedical research.}, journal = {Journal of the American Medical Informatics Association : JAMIA}, volume = {15}, year = {2008}, month = {2008 Mar-Apr}, pages = {138-49}, abstract = {OBJECTIVE: To develop software infrastructure that will provide support for discovery, characterization, integrated access, and management of diverse and disparate collections of information sources, analysis methods, and applications in biomedical research. DESIGN: An enterprise Grid software infrastructure, called caGrid version 1.0 (caGrid 1.0), has been developed as the core Grid architecture of the NCI-sponsored cancer Biomedical Informatics Grid (caBIG) program. 
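The acute hepatitis B algorithm in the Klompas et al. entry above is, in essence, a conjunction of codified criteria over a patient's record. A schematic Python rendering (field names and numeric thresholds are invented placeholders, not ESP's actual schema or the validated cut-offs):

    # Hypothetical patient record; field names are illustrative only.
    record = {
        "hbsag_positive": True,          # positive hepatitis-B-specific test
        "alt": 850, "bilirubin": 3.2,    # transaminase (U/L) and bilirubin (mg/dL)
        "prior_positive_hbv_test": False,
        "icd9_chronic_hbv": False,
    }

    def acute_hepatitis_b(rec, alt_threshold=200, bili_threshold=1.5):
        """Conjunction of the published algorithm's criteria;
        the numeric thresholds here are placeholders."""
        return (rec["hbsag_positive"]
                and rec["alt"] > alt_threshold
                and rec["bilirubin"] > bili_threshold
                and not rec["prior_positive_hbv_test"]
                and not rec["icd9_chronic_hbv"])

    print(acute_hepatitis_b(record))  # True -> candidate case for reporting

By contrast, the caGrid infrastructure described in the surrounding entry approaches data sharing at a much larger, service-oriented scale.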
It is designed to support a wide range of use cases in basic, translational, and clinical research, including 1) discovery, 2) integrated and large-scale data analysis, and 3) coordinated study. MEASUREMENTS: The caGrid is built as a Grid software infrastructure and leverages Grid computing technologies and the Web Services Resource Framework standards. It provides a set of core services, toolkits for the development and deployment of new community provided services, and application programming interfaces for building client applications. RESULTS: The caGrid 1.0 was released to the caBIG community in December 2006. It is built on open source components and caGrid source code is publicly and freely available under a liberal open source license. The core software, associated tools, and documentation can be downloaded from the following URL: https://cabig.nci.nih.gov/workspaces/Architecture/caGrid. CONCLUSIONS: While caGrid 1.0 is designed to address use cases in cancer research, the requirements associated with discovery, analysis and integration of large scale data, and coordinated studies are common in other biomedical fields. In this respect, caGrid 1.0 is the realization of a framework that can benefit the entire biomedical community.}, author = {Oster, Scott and Langella, Stephen and Hastings, Shannon and Ervin, David and Madduri, Ravi and Phillips, Joshua and Kurc, Tahsin and Siebenlist, Frank and Covitz, Peter and Shanbhag, Krishnakant and Foster, Ian and Saltz, Joel} } @article {19198666, title = {Generating Stimuli for Neuroscience Using PsychoPy.}, journal = {Frontiers in neuroinformatics}, volume = {2}, year = {2008}, month = {2008}, pages = {10}, abstract = {PsychoPy is a software library written in Python, using OpenGL to generate very precise visual stimuli on standard personal computers. It is designed to allow the construction of as wide a variety of neuroscience experiments as possible, with the least effort. By writing scripts in standard Python syntax users can generate an enormous variety of visual and auditory stimuli and can interact with a wide range of external hardware (enabling its use in fMRI, EEG, MEG etc.). The structure of scripts is simple and intuitive. As a result, new experiments can be written very quickly, and trying to understand a previously written script is easy, even with minimal code comments. PsychoPy can also generate movies and image sequences to be used in demos or simulated neuroscience experiments. This paper describes the range of tools and stimuli that it provides and the environment in which experiments are conducted.}, author = {Peirce, Jonathan W} } @article {19068935, title = {Implementation of the Veterans Health Administration VistA clinical information system around the world.}, journal = {Healthcare quarterly (Toronto, Ont.)}, volume = {11}, year = {2008}, month = {2008}, pages = {83-9}, abstract = {The success story of the Veterans Health Administration (VHA) within the US Department of Veterans Affairs has been well documented and is generally well known. What is generally not known is that the VHA{\textquoteright}s clinical information system, known as VistA, and the computerized patient record system clinical user interface front end have been successfully transported and implemented to a number of non-VHA healthcare organizations across the United States. 
Moreover, VistA software modules have been installed, or are being considered for installation, in healthcare institutions around the world in countries such as Mexico, Finland, Jordan, Germany, Nigeria, Egypt, Malaysia, India, Brazil, Pakistan and Samoa.}, author = {Protti, Denis and Groen, Peter} }

@article {Jahnke-weber_makingavailable, title = {Making available Clinical Decision Support in Service-Oriented Architectures}, journal = {Journal on Information Technology in Healthcare}, volume = {6}, year = {2008}, pages = {54}, chapter = {42}, abstract = {Computer-based clinical decision support (CDS) has great potential for cost savings and for increasing patient safety and quality of care. The cost of owning and particularly maintaining CDS systems is significant. Therefore, it makes good economic sense to share a CDS service installation among a larger set of client systems. The emerging paradigm of service-oriented architectures (SOAs) embraces the idea of sharing and interaction between loosely coupled, co-operative services. Canada has based its planned architecture for realizing the electronic medical record (EMR) on the SOA paradigm. While CDS components are currently not in the set of services to be constructed for Canada{\textquoteright}s health information infrastructure, there seems to be growing interest in adding them in the future, after the more essential services have been implemented. In this paper, we discuss the status of clinical decision support systems today and some challenges of making them available in SOA-based infrastructures. We report on design choices and solutions we have selected during the construction of the EGADSS (Electronic Guideline and Decision Support System) component. Our design decisions are based on domain-specific challenges such as knowledge, data, and workflow interoperability, as well as on technical considerations about constructing high-quality services for SOA-based infrastructures. EGADSS has been released under an open-source license and is freely available.}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.88.5385\&rep=rep1\&type=pdf}, author = {Jahnke-Weber, Jens H. and Price, Morgan and McCallum, Glen} }

@article {18366760, title = {OpenMS - an open-source software framework for mass spectrometry.}, journal = {BMC Bioinformatics}, volume = {9}, year = {2008}, month = {2008}, pages = {163}, abstract = {BACKGROUND: Mass spectrometry is an essential analytical technique for high-throughput analysis in proteomics and metabolomics. The development of new separation techniques, precise mass analyzers and experimental protocols is a very active field of research. This leads to more complex experimental setups yielding ever increasing amounts of data. Consequently, analysis of the data is currently often the bottleneck for experimental studies. Although software tools for many data analysis tasks are available today, they are often hard to combine with each other or not flexible enough to allow for rapid prototyping of a new analysis workflow. RESULTS: We present OpenMS, a software framework for rapid application development in mass spectrometry. OpenMS has been designed to be portable, easy-to-use and robust while offering a rich functionality ranging from basic data structures to sophisticated algorithms for data analysis. This has already been demonstrated in several studies.
CONCLUSION: OpenMS is available under the GNU Lesser General Public License (LGPL) from the project website at http://www.openms.de.}, url = {http://www.biomedcentral.com/1471-2105/9/163}, author = {Sturm, Marc and Bertsch, Andreas and Gr{\"o}pl, Clemens and Hildebrandt, Andreas and Hussong, Rene and Lange, Eva and Pfeifer, Nico and Schulz-Trieglaff, Ole and Zerck, Alexandra and Reinert, Knut and Kohlbacher, Oliver} }

@article {17680308, title = {Mastering DICOM with DVTk.}, journal = {Journal of Digital Imaging: the official journal of the Society for Computer Applications in Radiology}, volume = {20 Suppl 1}, year = {2007}, month = {2007 Nov}, pages = {47-62}, abstract = {The Digital Imaging and Communications in Medicine (DICOM) Validation Toolkit (DVTk) is an open-source framework with potential value for anyone working with the DICOM standard. DICOM{\textquoteright}s flexibility requires hands-on experience in understanding ways in which the standard{\textquoteright}s interpretation may vary among vendors. DVTk was developed as a clinical engineering tool to aid and accelerate DICOM integration at clinical sites. DVTk is used to provide an independent measurement of the accuracy of a product{\textquoteright}s DICOM interface, according to both the DICOM standard and the product{\textquoteright}s conformance statement. DVTk has stand-alone tools and a framework with which developers can create new tools. We provide an overview of the architecture of the toolkit, sample scenarios of its utility, and evidence of its relative ease of use. Our goal is to encourage involvement in this open-source project and attract developers to build on and further enrich this platform for DICOM integration testing.}, url = {http://www.springerlink.com/content/r17t75244k2376n0/}, author = {Potter, Glenn and Busbridge, Rick and Toland, Michael and Nagy, Paul} }

@article {17846835, title = {Open source software projects of the caBIG In Vivo Imaging Workspace Software special interest group.}, journal = {Journal of Digital Imaging: the official journal of the Society for Computer Applications in Radiology}, volume = {20 Suppl 1}, year = {2007}, month = {2007 Nov}, pages = {94-100}, abstract = {The cancer Biomedical Informatics Grid (caBIG) program was created by the National Cancer Institute to facilitate sharing of IT infrastructure, data, and applications among the National Cancer Institute-sponsored cancer research centers. The program was launched in February 2004 and now links more than 50 cancer centers. In April 2005, the In Vivo Imaging Workspace was added to promote the use of imaging in cancer clinical trials. At the inaugural meeting, four special interest groups (SIGs) were established. The Software SIG was charged with identifying projects that focus on open-source software for image visualization and analysis. To date, two projects have been defined by the Software SIG. The eXtensible Imaging Platform project has produced a rapid application development environment that researchers may use to create targeted workflows customized for specific research projects. The Algorithm Validation Tools project will provide a set of tools and data structures that will be used to capture measurement information and associated metadata needed to allow a gold standard to be defined for a given database, against which change analysis algorithms can be tested.
Through these and future efforts, the caBIG In Vivo Imaging Workspace Software SIG endeavors to advance imaging informatics and provide new open-source software tools to advance cancer research.}, author = {Prior, Fred W and Erickson, Bradley J and Tarbox, Lawrence} }

@article {18074196, title = {Open-source, low-cost, high-reliability solutions for digital imaging systems: example of a "dicom router".}, journal = {La Radiologia Medica}, volume = {112}, year = {2007}, month = {2007 Dec}, pages = {1252-9}, abstract = {PURPOSE: The purpose of this article is to illustrate a case where acquisition of digital imaging know-how by a modern radiotherapy division has helped to solve a technical problem while allowing substantial savings through the use of free and open-source resources. The problem was related to the necessity to route, with complex policies, the images produced by different digital imaging and communications in medicine (DICOM) sources within the department or in other divisions and/or hospitals. MATERIALS AND METHODS: The problem was solved by using completely free, well-tested and stable technologies (PHP, Apache, MySQL, DCMTK OFFIS, Red Hat Linux 9A and Linux Fedora Core 4) and low-cost hardware to contain costs. In the development, we also considered integration of the routed images with the existing electronic clinical records. RESULTS: The system developed, called the dicom router, implemented two kinds of routing: manual and automatic, both oriented to linking the acquired images with the existing electronic clinical records. System stability was enhanced in a second phase by using a low-cost hardware redundancy solution. The system has now been operating for 1 year and has proved the value of the technologies used. CONCLUSIONS: The need to operate with more than one provider creates a series of integration issues, so that it becomes economically appealing to acquire in-house the knowledge needed to interact more precisely with providers of large information technology (IT) solutions. This need is well catered for by open-source technologies, which are well documented and available to everyone. By using them, in-house IT technicians are able to implement valuable technical solutions for small- to medium-sized informatization problems, which would otherwise remain unsolved except with great economic effort.}, author = {Gatta, R and Abeni, F and Buglione, M and Peveri, A and Barbera, F and Tonoli, S and Fratta, P and Magrini, S M} }

@article {17254636, title = {PsychoPy--Psychophysics software in Python.}, journal = {Journal of Neuroscience Methods}, volume = {162}, year = {2007}, month = {2007 May 15}, pages = {8-13}, abstract = {The vast majority of studies into visual processing are conducted using computer display technology. The current paper describes a new free suite of software tools designed to make this task easier, using the latest advances in hardware and software. PsychoPy is a platform-independent experimental control system written in the Python interpreted language using entirely free libraries. PsychoPy scripts are designed to be extremely easy to read and write, while retaining complete power for the user to customize the stimuli and environment. Tools are provided within the package to allow everything from stimulus presentation and response collection (from a wide range of devices) to simple data analysis such as psychometric function fitting. Most importantly, PsychoPy is highly extensible and the whole system can evolve via user contributions.
If a user wants to add support for a particular stimulus, analysis, or hardware device, they can look at the code for existing examples, modify them, and submit the modifications back into the package so that the whole community benefits.}, author = {Peirce, Jonathan W} }

@conference {SOFA07, title = {SOFA - an Open Source Framework for Medical Simulation}, booktitle = {Medicine Meets Virtual Reality (MMVR{\textquoteright}15)}, year = {2007}, month = {February}, address = {Long Beach, USA}, url = {http://www.sofa-framework.org/docs/SOFA_MMVR07.pdf}, author = {Allard, J{\'e}r{\'e}mie and Cotin, St{\'e}phane and Faure, Fran{\c c}ois and Bensoussan, Pierre-Jean and Poyer, Fran{\c c}ois and Duriez, Christian and Delingette, Herv{\'e} and Grisoni, Laurent} }

@article {566, title = {O3-DPACS Open-Source Image-Data Manager/Archiver and HDW2 Image-Data Display: an IHE-compliant project pushing the e-health integration in the world.}, journal = {Computerized Medical Imaging and Graphics: the official journal of the Computerized Medical Imaging Society}, volume = {30}, year = {2006}, month = {2006 Sep-Oct}, pages = {391-406}, abstract = {After many years of study, development, and experimentation with open PACS and image workstation solutions, including management of medical data and signals (the DPACS project), research and development at the University of Trieste has recently been directed towards Java-based, IHE-compliant, multi-purpose servers and clients. In this paper, an original Image-Data Manager/Archiver (O3-DPACS) and a universal Image-Data Display (HDW2) are described. O3-DPACS is also part of a new project called the Open Three (O3) Consortium, which promotes Open Source adoption in e-health at European and worldwide levels. This project aims to contribute to the development of e-health through the study of Healthcare Information Systems and the contemporary proposal of new concepts, designs, and solutions for the management of health data in an integrated environment: hospitals, Regional Health Information Organizations, and citizens (home care, mobile care, and ambient assisted living).}, keywords = {Data Display, Database Management Systems, Diagnostic Imaging, Equipment Design, Equipment Failure Analysis, Guidelines as Topic, Information Storage and Retrieval, Internationality, Medical Records Systems, Computerized, Programming Languages, Radiology Information Systems, Systems Integration, User-Computer Interface}, issn = {0895-6111}, author = {Inchingolo, Paolo and Beltrame, Marco and Bosazzi, Pierpaolo and Cicuta, Davide and Faustini, Giorgio and Mininel, Stefano and Poli, Andrea and Vatta, Federica} }

@article {16545965, title = {User-guided 3D active contour segmentation of anatomical structures: significantly improved efficiency and reliability.}, journal = {NeuroImage}, volume = {31}, year = {2006}, month = {2006 Jul 1}, pages = {1116-28}, abstract = {Active contour segmentation and its robust implementation using level set methods are well-established theoretical approaches that have been studied thoroughly in the image analysis literature. Despite the existence of these powerful segmentation methods, the needs of clinical research continue to be fulfilled, to a large extent, using slice-by-slice manual tracing.
To bridge the gap between methodological advances and clinical routine, we developed an open source application called ITK-SNAP, which is intended to make level set segmentation easily accessible to a wide range of users, including those with little or no mathematical expertise. This paper describes the methods and software engineering philosophy behind this new tool and provides the results of validation experiments performed in the context of an ongoing child autism neuroimaging study. The validation establishes SNAP intrarater and interrater reliability and overlap error statistics for the caudate nucleus and finds that SNAP is a highly reliable and efficient alternative to manual tracing. Analogous results for lateral ventricle segmentation are provided.}, author = {Yushkevich, Paul A and Piven, Joseph and Hazlett, Heather Cody and Smith, Rachel Gimpel and Ho, Sean and Gee, James C and Gerig, Guido} }

@article {18238234, title = {BioSig: an imaging bioinformatics system for phenotypic analysis.}, journal = {IEEE Transactions on Systems, Man, and Cybernetics, Part B (Cybernetics): a publication of the IEEE Systems, Man, and Cybernetics Society}, volume = {33}, year = {2003}, month = {2003}, pages = {814-24}, abstract = {Organisms express their genomes in a cell-specific manner, resulting in a variety of cellular phenotypes or phenomes. Mapping cell phenomes under a variety of experimental conditions is necessary in order to understand the responses of organisms to stimuli. Representing such data requires an integrated view of experimental and informatic protocols. The proposed system, named BioSig, provides the foundation for cataloging cellular responses as a function of specific conditioning, treatment, staining, etc., for either fixed tissue or living cell studies. A data model has been developed to capture experimental variables and map them to image collections and their computed representation. This representation is hierarchical and spans across sample tissues, cells, and organelles, which are imaged with light microscopy. At each layer, content is represented with an attributed graph, which contains information about cellular morphology, protein localization, and cellular organization in tissue or cell culture. The Web-based multilayer informatics architecture uses the data model to provide guided workflow access for content exploration.}, author = {Parvin, B and Yang, Qing and Fontenay, G and Barcellos-Hoff, M H} }