From 5e0b1c40a3d04db35df9132cedcd304c8c7f3c6b Mon Sep 17 00:00:00 2001 From: Joseph Chingalo Date: Fri, 28 Nov 2025 12:51:24 +0300 Subject: [PATCH 1/4] chore: add brief about data use apps on the home page --- docs/overview/resources.md | 6 ------ sidebars.ts | 14 +------------- src/components/HomepageFeatures/index.tsx | 3 +++ 3 files changed, 4 insertions(+), 19 deletions(-) delete mode 100644 docs/overview/resources.md diff --git a/docs/overview/resources.md b/docs/overview/resources.md deleted file mode 100644 index 1262053..0000000 --- a/docs/overview/resources.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -label: Resources -sidebar_position: 4 ---- - -# Resources diff --git a/sidebars.ts b/sidebars.ts index 6a09144..6de7a49 100644 --- a/sidebars.ts +++ b/sidebars.ts @@ -14,23 +14,11 @@ import type {SidebarsConfig} from '@docusaurus/plugin-content-docs'; */ const sidebars: SidebarsConfig = { // By default, Docusaurus generates a sidebar from the docs folder structure - overviewSidebar: [{type: 'autogenerated', dirName: 'overview'}], + overviewSidebar: [{type: 'autogenerated', dirName: 'overview'}], scorecardSidebar: [{type: 'autogenerated', dirName: 'scorecard'}], bnaSidebar: [{type: 'autogenerated', dirName: 'bna'}], actionTrackerSidebar: [{type: 'autogenerated', dirName: 'action-tracker'}], standaloneActionTrackerSidebar: [{type: 'autogenerated', dirName: 'standalone-action-tracker'}], - // But you can create a sidebar manually - /* - tutorialSidebar: [ - 'intro', - 'hello', - { - type: 'category', - label: 'Tutorial', - items: ['tutorial-basics/create-a-document'], - }, - ], - */ }; export default sidebars; diff --git a/src/components/HomepageFeatures/index.tsx b/src/components/HomepageFeatures/index.tsx index 260666b..31c422f 100644 --- a/src/components/HomepageFeatures/index.tsx +++ b/src/components/HomepageFeatures/index.tsx @@ -15,6 +15,7 @@ const FeatureList: FeatureItem[] = [ icon: "img/logos/scorecard.png", description: ( <> + The Scorecard application 
enables users to track, visualize, and review key performance indicators using color coding to highlight performance.
),
},
@@ -23,6 +24,7 @@ const FeatureList: FeatureItem[] = [
icon: "img/logos/bna.png",
description: (
<>
+ Bottleneck Analysis (BNA) is a DHIS2 application that helps identify and determine the bottlenecks using the Tanahashi model & identify the root causes.
),
},
@@ -31,6 +33,7 @@ const FeatureList: FeatureItem[] = [
icon: "img/logos/bna-action-tracker.png",
description: (
<>
+ The Action Tracker is a DHIS2 application that enables users to plan & track activities/actions resulting from root cause analysis from the BNA app.
),
},
From d2f234c96d2114ee78f30f140d10a2fb71335c5c Mon Sep 17 00:00:00 2001
From: Joseph Chingalo
Date: Fri, 28 Nov 2025 12:51:55 +0300
Subject: [PATCH 2/4] feat: add introduction section on overview module
---
docs/overview/intro.md | 55 +++++++++++++++++++++++++++++++++++++++---
1 file changed, 52 insertions(+), 3 deletions(-)
diff --git a/docs/overview/intro.md b/docs/overview/intro.md
index 9738f5e..1360a10 100644
--- a/docs/overview/intro.md
+++ b/docs/overview/intro.md
@@ -1,8 +1,57 @@
---
-label: Introduction
+label: Overview
sidebar_position: 1
---
-# Introduction
+# Overview
+The Data Use Applications (Data Use Apps) are a suite of interconnected DHIS2 applications designed to strengthen data analysis, evidence-based, informed decision making, and action planning across health and development programs. Developed collaboratively by **UNICEF**, **the University of Oslo (UiO)**, **HISP Uganda**, and **HISP Tanzania**, these apps empower stakeholders at all levels — from national program leads to district managers and facility teams — to translate routine data into meaningful insights and tangible improvements in service delivery.
-Introduction to Data use apps
+## What Are Data Use Apps?
+Data Use Apps are lightweight, modular DHIS2 applications focused on making data easy to interpret, actionable, and results-oriented. 
They bring together essential tools for: +- Monitoring and tracking key performance indicators +- Identifying bottlenecks in service delivery +- Understanding root causes of performance gaps +- Planning and tracking interventions and activities +- Strengthening accountability and follow up mechanisms + +Together, these applications create a streamlined workflow from **data** → **insight** → **action** → **follow up**, enabling continuous improvement cycles within health systems. + +## Purpose of Data Use Apps + +The primary purpose of the Data Use Apps is to close the gap between data collection and data use. While DHIS2 provides powerful data collection and reporting capabilities, many programs struggle with: +- Identifying gaps and bottlenecks affecting performance +- Coordinating follow up actions +- Tracking the implementation and impact of those actions + +Data Use Apps simplify this process by giving users intuitive tools that help them: +- Review performance quickly using visual scorecards +- Detect bottlenecks through structured analysis using the Tanahashi model +- Identify root causes of poor performance +- Plan actions based on evidence +- Track progress on planned actions to ensure accountability and results + +## Overall Vision +The vision of the Data Use Apps ecosystem is to build a culture of data use at all levels of the health system, where decisions are guided by evidence, resources are allocated strategically, and actions are monitored for results. 
+ +The long term goal is to provide a unified set of tools that: +- Enhance data driven planning and informed decision making +- Improve the quality and coverage of essential services +- Strengthen local and national accountability +- Support performance improvement across key indicators +- Enable governments and partners to achieve better outcomes for women, children, and vulnerable populations + +By integrating analysis, diagnosis, planning, and monitoring into a single workflow, the Data Use Apps help institutions move from reactive reporting to proactive, evidence driven program management. + +## Components of the Data Use Apps Suite +The suite currently consists of four core applications, each addressing a different stage of the data use cycle: +### Scorecard App +Provides a quick, visual way to track and compare the performance of key indicators. Color coding helps users immediately see where performance is strong, needs attention, or requires urgent action. + +### Bottleneck Analysis (BNA) App +Helps users identify barriers affecting service delivery using the Tanahashi model (availability, accessibility, acceptability, contact, and quality). Users can analyze gaps and determine root causes of poor performance. + +### BNA Linked Action Tracker +A dedicated tool that connects bottleneck and root cause findings to concrete actions. Users can plan, assign, and monitor activities directly linked to BNA results. + +### Standalone Action Tracker (SAT) +A flexible app for teams that need an independent action-tracking workflow. It supports planning, coordination, and follow-up on priority actions, even beyond BNA processes. 
From 3146cc34f60e530e6dae77ab9c514b038cc3580d Mon Sep 17 00:00:00 2001 From: Joseph Chingalo Date: Fri, 28 Nov 2025 12:52:17 +0300 Subject: [PATCH 3/4] feat: add contents for ecosystem section in overview module --- docs/overview/ecosystem.md | 64 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/docs/overview/ecosystem.md b/docs/overview/ecosystem.md index dc2a7ec..afb467d 100644 --- a/docs/overview/ecosystem.md +++ b/docs/overview/ecosystem.md @@ -4,3 +4,67 @@ sidebar_position: 2 --- # Ecosystem overview + +## How the Data Use Apps Work Together + +The Data Use Apps are designed not as standalone tools, but as an integrated data-to-action ecosystem. Each app plays a unique role in the decision making cycle, creating a continuous loop from performance monitoring to action follow up. Together, they enable programs to move from identifying problems to implementing and tracking solutions within a single, unified workflow. + +## The Data Use Apps Workflow +At the core of the ecosystem is a four-step process: +- Review & Interpret Performance (Scorecard App) +- Identify Bottlenecks & Diagnose Causes (BNA App) +- Plan & Assign Actions (Linked Action Tracker) +- Track Implementation & Accountability (Standalone or Linked AT) + +This workflow ensures that data is not only analyzed but systematically used to improve services. + +## How the Four Apps Connect +Below is the flow of interaction among the data use apps: + +### Scorecard → BNA +Users begin by reviewing performance indicators in the Scorecard App. +If an indicator performs poorly (e.g., marked red or yellow), they can launch a Bottleneck Analysis to investigate further. 
+#### Purpose: +- Identify what is underperforming +- Move to understanding why it’s underperforming + +### BNA → Root Cause Analysis + +Within the BNA App, users evaluate performance through the Tanahashi model to identify the specific stage (availability, accessibility, acceptability, contact, quality) where the bottleneck exists. +From there, users perform root cause analysis (e.g., using the 5 Whys). +#### Outcome: +A list of bottlenecks and root causes that require action. + +### BNA → Linked Action Tracker +Once bottlenecks and root causes are identified, users can seamlessly send them to the Linked Action Tracker. +#### Purpose: +- Create action plans +- Assign responsibilities +- Set timelines +- Link actions directly to BNA findings + +This ensures transparent follow up on the exact gaps identified through analysis. + +### Action Tracker (Standalone or Linked) → Implementation & Monitoring +Teams use the Action Tracker to: +- Track progress of planned activities +- Update status (in progress, completed, delayed) +- Document evidence +- Communicate responsibilities + +The Action Tracker creates organizational accountability and closes the loop between data insights and real-world actions. + +### Feedback Loop to Scorecard +As actions are implemented and data is updated in DHIS2: +- Improved performance becomes visible again in the Scorecard App +- Managers can verify whether actions had the expected impact +- If gaps persist, the cycle continues + +This supports a continuous quality improvement cycle. 
+
+## Why the Ecosystem Matters
+- Ensures end-to-end data use, not just analysis
+- Creates a structured method to identify and fix performance gaps
+- Links **evidence** → **action** → **accountability**
+- Reduces fragmentation between planning tools
+- Strengthens program management and improves outcomes
From 0704be1a4796781ac41246ac866fb7ed89b3837c Mon Sep 17 00:00:00 2001
From: Joseph Chingalo
Date: Fri, 28 Nov 2025 12:52:50 +0300
Subject: [PATCH 4/4] feat: add contents on Implementation Framework section of overview module
---
docs/overview/implementation.md | 132 ++++++++++++++++++++++++++++++++
1 file changed, 132 insertions(+)
diff --git a/docs/overview/implementation.md b/docs/overview/implementation.md
index b343142..13856ad 100644
--- a/docs/overview/implementation.md
+++ b/docs/overview/implementation.md
@@ -4,3 +4,135 @@ sidebar_position: 3
---
# Implementation Framework
+The implementation of the Data Use Apps requires a combination of technical setup, stakeholder engagement, and governance alignment. This section provides a high-level guide to help countries, programs, and implementing partners adopt and operationalize the full Data Use Apps ecosystem successfully.
+
+## Prerequisites
+Before deploying the Data Use Apps, ensure the following technical and organizational prerequisites are in place:
+
+### DHIS2 Platform Requirements
+- DHIS2 version 2.39 or higher (recommended: latest stable version)
+- Proper configuration of:
+  - Data elements and indicators
+  - Data sets or program indicators
+  - Organizational hierarchy
+  - User roles and sharing settings
+- Functional analytics and data approvals where applicable
+
+### Data Availability & Quality
+The apps rely on timely and accurate data. 
Implementers should confirm:
+- Routine data reporting is consistent
+- Indicator formulas are correct and validated
+- Data quality checks (DQAs) are conducted regularly
+- Domains used in scorecards and BNA are populated with real data
+
+### Infrastructure & Access
+- Stable server hosting environment
+- Reliable internet connectivity for end users
+- User accounts with appropriate permissions (analysis, dashboards, apps)
+- Access to the DHIS2 App Hub or ability to deploy custom apps
+
+## Implementation Governance
+Successful adoption requires strong governance structures to ensure ownership, sustainability, and alignment with national systems and guidelines.
+
+### Leadership & Coordination
+Countries should designate:
+- A national coordinating team (MoH, implementing partners)
+- A technical working group (TWG) for M&E or Digital Health
+- A dedicated Data Use Task Team at subnational levels
+
+These teams guide adoption, review progress, and ensure integration into existing planning processes.
+
+### Integration With National Processes
+The Data Use Apps must align with:
+- Annual or quarterly performance reviews
+- National health sector planning cycles
+- RMNCAH, Nutrition, Immunization, HIV, or other program reviews
+- Joint supervision and mentorship structures
+
+Ensuring integration increases sustainability and reduces parallel processes.
+
+### Roles & Responsibilities
+Clear roles should be defined for:
+- **Ministry of Health / Program Units**: Ownership, policy direction
+- **District Teams**: Data review, bottleneck analysis, action follow-up
+- **Facility Teams**: Data entry and local improvement actions
+- **HISP Teams**: Technical configuration, user training, support
+- **Partners (UNICEF, NGOs)**: Capacity building and supervision support
+
+
+## Adoption Steps (High-Level Guide)
+### Orientation & Stakeholder Buy-In
+Introduce the role of the apps in enhancing data-to-action processes. 
Facilitate sessions with national and subnational leaders to build understanding and commitment. + +### System Readiness Assessment +Evaluate: +- Data quality and availability +- Existing scorecards +- Completeness of indicator metadata +- Alignment with program needs + +This ensures the tools are adopted on a solid baseline. + +### Technical Deployment +- Install required applications (Scorecard, BNA, Linked AT, SAT) +- Configure indicator groups, domains, BNA models, and action categories +- Validate user roles and sharing permissions +- Test end to end workflows in a testing environment + +### Capacity Building +Develop a training plan tailored for: +- National program managers +- Regional/district data teams +- Facility-level staff + +Training should include practical exercises using real program data. + +### Pilot Implementation +Run a pilot in selected regions or districts to: +- Test workflows +- Validate indicators and BNA models +- Train action planning teams +- Identify user challenges and refine configurations + +### Scale Up & Institutionalization +Once validated: +- Roll out to more regions/districts +- Embed workflows into quarterly review meetings +- Use AT to track follow ups of RMNCAH/immunization/nutrition actions +- Monitor usage and impact via dashboards + +### Continuous Improvement +Use insights from pilot and rollout to: +- Update scorecard indicator sets +- Refine bottleneck models +- Improve action tracking categories +- Strengthen data quality interventions + +This ensures the ecosystem evolves with program needs. 
+ +## Data Governance & Accountability +To ensure long term sustainability, programs should establish: + +### Data Governance Framework +- Policies for indicator updates and reviews +- Data validation and quality assurance processes +- Clear naming and metadata standards +- Approval processes for changes in scorecards or BNA models + +### Action Accountability Mechanism +An established workflow where: +- Actions are reviewed during routine review meetings +- Progress status is updated in Action Tracker +- Leaders provide feedback and support +- Completed actions feed back into performance improvement + +This reinforces a culture of data use and results-based management. + +## Recommended Best Practices +- **Start simple**: begin with a small set of priority indicators +- Use real data during trainings +- Avoid overly complex scorecard domains +- Limit the number of bottleneck questions to essential ones +- Assign clear owners for each action +- Review action progress monthly or quarterly +- Ensure digital and non digital processes align