Migrate to Gitea

This commit is contained in:
Landon Taylor 2025-04-17 15:53:45 -06:00
parent da86957915
commit 7a0b0cd465
46 changed files with 6083 additions and 0 deletions

31
.gitignore vendored Normal file
View File

@ -0,0 +1,31 @@
# Compiled binary
*.exe
*.dll
*.so
*.dylib
# Rust/Cargo specific files
/target/
**/target/
# Build artifacts
*.rlib
# Generated by Cargo
Cargo.lock
# IDEs and editors
.idea/
.vscode/
*.swp
*.swo
*.swn
*.bak
*.tmp
# Logs
*.log
# Other
.DS_Store
Thumbs.db

14
Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[package]
name = "fable"
version = "0.1.0"
edition = "2021"
[dependencies]
crossterm = "0.28.1"
inquire = "0.7.5"
rand = "0.9.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
serde_with = { version = "3.12.0", features = ["json"] }
strum = "0.27.1"
strum_macros = "0.27.1"

View File

@ -0,0 +1,145 @@
{
"unit": {
"Appraisal": "ConversionToPropositional"
},
"title": "Galactic Logic Academy: Mastering Propositional Logic Conversion",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected to join the Galactic Logic Academy's elite training program. Your mission is to master the art of converting natural language statements into propositional logic. This skill is essential for resolving intergalactic disputes and ensuring peace across the stars. Ready to begin? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Propositional logic is a powerful tool for analyzing arguments and reasoning. It allows us to represent statements in a formal, structured way. Your first task is to understand the basic building blocks of propositional logic: propositions, connectives, and truth values.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "A proposition is a statement that can be either true or false. For example, 'The starship is operational' is a proposition. Logical connectives like AND (∧), OR (), NOT (¬), and IMPLIES (→) help us combine propositions to form more complex statements.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "To convert a natural language statement into propositional logic, identify the key propositions and their relationships. For example, the statement 'If the starship is repaired, it will fly' can be represented as P → Q, where P is 'The starship is repaired' and Q is 'The starship will fly.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following represents the statement 'If it rains, the ground will be wet' in propositional logic?",
"options": [
"P ∧ Q",
"P → Q",
"P Q",
"¬P → Q"
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Identify the condition (P) and the result (Q)."
]
},
{
"screen_type": "Text",
"text": "Negation is another important concept in propositional logic. For example, the statement 'The starship is not operational' can be represented as ¬P, where P is 'The starship is operational.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How would you represent the statement 'The starship is not repaired' in propositional logic?",
"options": [
"P",
"¬P",
"P Q",
"P → Q"
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Negation is represented by ¬."
]
},
{
"screen_type": "Text",
"text": "Conjunctions combine two propositions with AND (∧). For example, 'The starship is repaired and operational' can be represented as P ∧ Q, where P is 'The starship is repaired' and Q is 'The starship is operational.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of conjunctions in propositional logic? Select all that apply.",
"options": [
"P ∧ Q",
"P Q",
"¬P ∧ Q",
"P → Q",
"P ∧ ¬Q"
],
"correct_indices": [0, 2, 4],
"correct_text": null,
"hints": [
"Conjunctions use the AND (∧) operator."
]
},
{
"screen_type": "Text",
"text": "Disjunctions combine two propositions with OR (). For example, 'The starship is repaired or operational' can be represented as P Q, where P is 'The starship is repaired' and Q is 'The starship is operational.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How would you represent the statement 'The starship is repaired or operational' in propositional logic?",
"options": [
"P ∧ Q",
"P Q",
"¬P ∧ Q",
"P → Q"
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Disjunctions use the OR () operator."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on converting natural language statements into propositional logic. Your ability to translate complex arguments into formal logic will help maintain peace and order across the galaxy. The stars are counting on you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,118 @@
{
"unit": {
"Appraisal": "CounterArgument"
},
"title": "Galactic Logic Academy: Mastering the Art of Counterarguments",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected to join the Galactic Logic Academy's mission to master the art of counterarguments. In this course, you'll learn how to critically analyze arguments, identify weaknesses, and craft compelling counterarguments to strengthen your reasoning. Ready to sharpen your skills? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "A counterargument is a reasoned response to an argument that challenges its validity or offers an alternative perspective. Crafting a strong counterargument requires understanding the original argument, identifying its weaknesses, and presenting a logical rebuttal.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Step 1: Understand the Argument. Before crafting a counterargument, ensure you fully understand the original argument. What is the claim? What evidence supports it? What assumptions are being made?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Step 2: Identify Weaknesses. Look for logical fallacies, unsupported assumptions, or gaps in evidence. For example, does the argument rely on a false equivalence or circular reasoning?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is a logical fallacy that weakens an argument?",
"options": [
"False equivalence",
"Strong evidence",
"Clear reasoning"
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"A logical fallacy undermines the validity of an argument."
]
},
{
"screen_type": "Text",
"text": "Step 3: Present Your Counterargument. A strong counterargument directly addresses the weaknesses of the original argument. Use evidence, logic, and clarity to make your case.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of a strong counterargument?",
"options": [
"The argument assumes that all planets with water are habitable, but scientific evidence shows that other factors, such as atmosphere and temperature, are also critical.",
"The argument is wrong because I disagree with it.",
"The argument is invalid because it doesn't make sense to me."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"A strong counterargument uses evidence and logic to address the original argument's weaknesses."
]
},
{
"screen_type": "Text",
"text": "Step 4: Anticipate Rebuttals. A great counterargument also considers potential responses and addresses them preemptively. This demonstrates thorough understanding and strengthens your position.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are strategies for crafting a strong counterargument? Select all that apply.",
"options": [
"Understand the original argument fully.",
"Identify logical fallacies or weaknesses.",
"Use evidence and logic to support your rebuttal.",
"Ignore potential rebuttals to your counterargument.",
"Address potential rebuttals preemptively."
],
"correct_indices": [0, 1, 2, 4],
"correct_text": null,
"hints": [
"A strong counterargument is well-reasoned, evidence-based, and anticipates rebuttals."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's course on writing counterarguments. Remember, a well-crafted counterargument is a powerful tool for critical thinking and effective communication. Use your skills wisely to promote understanding and resolve conflicts across the galaxy!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,117 @@
{
"unit": {
"Bias": "BarnumEffect"
},
"title": "Galactic Psychology Academy: Understanding the Barnum Effect",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected to embark on a mission to uncover the mysteries of the mind. As intergalactic leaders seek guidance, your task is to understand and identify cognitive biases that influence decision-making. Ready to explore the stars of the mind? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, misunderstandings often arise due to cognitive biases. The Barnum Effect occurs when people believe that vague, general statements about personality are uniquely applicable to them. As a cadet, your mission is to identify and understand this bias.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Remember the last couple of units you completed? They were all about cognitive biases. The Barnum Effect is a bias where people see personal meaning in statements that could apply to anyone.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "An example of the Barnum Effect is when a planetary leader reads a horoscope saying, 'You are a natural leader who values harmony but can be stubborn at times,' and believes it describes them perfectly. This statement could apply to almost anyone.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the Barnum Effect?",
"options": [
"Believing a horoscope that says, 'You have a great need for others to like and admire you.'",
"Assuming that a specific scientific theory applies only to your planet.",
"Believing that a unique planetary alignment affects only your species."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The Barnum Effect involves vague statements that could apply to many people."
]
},
{
"screen_type": "Text",
"text": "The Barnum Effect is often used in personality tests, horoscopes, and fortune-telling. For example, a fortune-teller might say, 'You have a strong desire to be loved, but sometimes you doubt yourself.' This statement is designed to feel personal but is actually very general.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the Barnum Effect? Select all that apply.",
"options": [
"Believing a personality test that says, 'You are sometimes introverted but can be outgoing in the right situations.'",
"Thinking a horoscope that says, 'You have a creative side that you don't always show,' is uniquely about you.",
"Assuming that a vague statement like, 'You have experienced challenges in your life,' is highly specific to you.",
"Believing that a fortune-teller's statement, 'You are a kind person who values honesty,' is uniquely accurate."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"The Barnum Effect relies on statements that feel personal but are actually very general."
]
},
{
"screen_type": "Text",
"text": "The Barnum Effect can lead to overconfidence in pseudosciences like astrology or fortune-telling. Understanding this bias helps you critically evaluate such claims and make informed decisions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Why is it important to understand the Barnum Effect?",
"options": [
"To avoid being misled by vague statements that seem personal.",
"To improve your ability to create horoscopes.",
"To ensure that you can always trust personality tests."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Understanding the Barnum Effect helps you critically evaluate general statements."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Psychology Academy's unit on the Barnum Effect. Remember, your ability to recognize this bias is key to making rational decisions and guiding others toward clarity. The galaxy is counting on your insight!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,109 @@
{
"unit": {
"Bias": "ConfirmationBias"
},
"title": "Galactic Logic Academy: Mastering Confirmation Bias to Save the Stars",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! The galaxy is in turmoil, and your mission is to restore balance by mastering the art of critical thinking. Today, we focus on confirmation bias—a cognitive bias that clouds judgment and fuels interplanetary disputes. Ready to sharpen your reasoning skills? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Confirmation bias occurs when individuals favor information that supports their existing beliefs while ignoring evidence that contradicts them. This bias can lead to flawed decisions and unnecessary conflicts. As a cadet, your task is to identify and counteract this bias.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Imagine a planetary leader who believes their planet is the most advanced in the galaxy. They only seek out data that supports this belief, ignoring evidence of other planets' technological achievements. This is confirmation bias in action.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following scenarios demonstrates confirmation bias?",
"options": [
"A scientist only publishes data that supports their hypothesis, ignoring contradictory results.",
"A leader considers all available evidence before making a decision.",
"A trader seeks advice from experts with differing opinions to make an informed choice."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Confirmation bias involves selectively focusing on information that aligns with pre-existing beliefs."
]
},
{
"screen_type": "Text",
"text": "Confirmation bias can also manifest in group settings. For example, a council of planetary leaders might only invite members who agree with their policies, creating an echo chamber that reinforces their biases.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of confirmation bias? Select all that apply.",
"options": [
"A planetary council only invites members who agree with their policies.",
"A researcher dismisses studies that contradict their theory.",
"A pilot double-checks all navigation data, even if it contradicts their initial route.",
"A citizen only watches news channels that align with their political views.",
"A scientist revises their hypothesis after reviewing contradictory evidence."
],
"correct_indices": [0, 1, 3],
"correct_text": null,
"hints": [
"Confirmation bias often involves ignoring or dismissing contradictory evidence."
]
},
{
"screen_type": "Text",
"text": "To combat confirmation bias, it's essential to actively seek out diverse perspectives and critically evaluate all evidence. This approach fosters better decision-making and promotes harmony across the galaxy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which strategy is most effective in overcoming confirmation bias?",
"options": [
"Seeking out evidence that supports your beliefs.",
"Ignoring evidence that contradicts your beliefs.",
"Actively seeking diverse perspectives and critically evaluating all evidence."
],
"correct_indices": [2],
"correct_text": null,
"hints": [
"Overcoming confirmation bias requires openness to differing viewpoints."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on confirmation bias. By recognizing and overcoming this bias, you are now equipped to make fair and informed decisions that will bring peace to the galaxy. The stars shine brighter because of you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,118 @@
{
"unit": {
"Bias": "DunningKrugerEffect"
},
"title": "Galactic Logic Academy: Understanding the Dunning-Kruger Effect",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected to embark on a mission to explore the mysteries of the mind. Your task is to understand the Dunning-Kruger effect, a cognitive bias that can lead to overconfidence in one's abilities. Ready to sharpen your insights? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The Dunning-Kruger effect occurs when individuals with low ability in a particular area overestimate their competence. This bias can lead to poor decisions and misunderstandings. As a cadet, your mission is to recognize and mitigate this bias.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Think back to times when you or others may have been overly confident in a new skill. The Dunning-Kruger effect often arises because individuals lack the knowledge to recognize their own limitations.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, a cadet might claim, 'Ive read one book on intergalactic diplomacy, so Im ready to negotiate peace treaties.' This overconfidence can lead to mistakes and missed opportunities for growth.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the Dunning-Kruger effect?",
"options": [
"A cadet with minimal training believes they are ready to lead a mission.",
"An experienced pilot doubts their ability to navigate a challenging route.",
"A scientist with years of research acknowledges the limits of their knowledge."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The Dunning-Kruger effect involves overestimating one's abilities due to a lack of expertise."
]
},
{
"screen_type": "Text",
"text": "The Dunning-Kruger effect can also manifest in reverse: experts may underestimate their abilities, assuming others find tasks as easy as they do. This is known as the 'impostor syndrome.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following describes the reverse Dunning-Kruger effect?",
"options": [
"A novice overestimates their skills.",
"An expert underestimates their abilities.",
"A cadet accurately assesses their competence."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Experts may assume their skills are common knowledge."
]
},
{
"screen_type": "Text",
"text": "To combat the Dunning-Kruger effect, its important to seek feedback, remain open to learning, and recognize the value of expertise. Growth begins with acknowledging what you dont know.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following strategies can help mitigate the Dunning-Kruger effect? Select all that apply.",
"options": [
"Seeking feedback from others.",
"Assuming you know everything about a topic.",
"Continuing to learn and practice.",
"Acknowledging your limitations.",
"Avoiding challenges to protect your confidence."
],
"correct_indices": [0, 2, 3],
"correct_text": null,
"hints": [
"Effective strategies involve humility and a willingness to grow."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the Dunning-Kruger effect. Remember, understanding this bias is key to personal growth and effective teamwork. The galaxy needs your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,108 @@
{
"unit": {
"Bias": "FundamentalAttributionError"
},
"title": "Galactic Logic Academy: Understanding the Fundamental Attribution Error",
"screens": [
{
"screen_type": "Text",
"text": "Greetings, Cadet! Welcome to the Galactic Logic Academy's unit on cognitive biases. Today, you'll learn about the Fundamental Attribution Error, a common bias that can lead to misunderstandings and conflict. Ready to sharpen your reasoning skills and bring clarity to the galaxy? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders often misjudge others' actions by attributing them to personality traits rather than external circumstances. This is known as the Fundamental Attribution Error. Your mission is to identify and correct this bias to foster better understanding and cooperation.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The Fundamental Attribution Error occurs when we overemphasize personal traits and underestimate situational factors in explaining others' behavior. For example, if a pilot is late to a meeting, we might assume they are lazy, ignoring the possibility of a delayed starship.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the Fundamental Attribution Error?",
"options": [
"Assuming a planetary ambassador is rude because they interrupted a meeting, without considering they received an urgent message.",
"Believing a starship mechanic is skilled because they fixed a ship quickly, without considering they had help from a team.",
"Thinking a leader is wise because they made a good decision, without considering they had expert advisors."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The Fundamental Attribution Error focuses on attributing behavior to personality rather than external factors."
]
},
{
"screen_type": "Text",
"text": "The Fundamental Attribution Error can lead to unfair judgments and strained relationships. For instance, a planetary leader might assume another leader is hostile due to their tone in a message, ignoring the stress of an intergalactic crisis.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following scenarios demonstrate the Fundamental Attribution Error? Select all that apply.",
"options": [
"Assuming a starship captain is reckless because they took a risky route, without considering they were avoiding a meteor shower.",
"Believing a diplomat is untrustworthy because they hesitated during negotiations, without considering they were processing new information.",
"Thinking a scientist is brilliant because they made a breakthrough, without considering they had access to advanced technology.",
"Judging a trader as greedy because they raised prices, without considering a shortage of resources."
],
"correct_indices": [0, 1, 3],
"correct_text": null,
"hints": [
"Look for scenarios where behavior is attributed to personality traits rather than external circumstances."
]
},
{
"screen_type": "Text",
"text": "To counter the Fundamental Attribution Error, always consider situational factors before making judgments. Ask yourself: Could external circumstances explain this behavior? This approach fosters empathy and understanding.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you avoid the Fundamental Attribution Error?",
"options": [
"Always assume people act based on their personality traits.",
"Consider external factors that might influence someone's behavior.",
"Focus only on the outcomes of someone's actions."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Avoiding this bias requires considering situational influences."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the Fundamental Attribution Error. By recognizing and addressing this bias, you can promote understanding and harmony across the galaxy. The stars shine brighter with your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,119 @@
{
"unit": {
"Bias": "InGroupBias"
},
"title": "Galactic Logic Academy: Understanding In-Group Bias",
"screens": [
{
"screen_type": "Text",
"text": "Hey, Cadet! You've been chosen to embark on a mission to rescue the galaxy from division. As disputes arise between interplanetary groups, your task is to understand and address in-group bias to foster unity and cooperation. Ready to save the stars? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, conflicts often arise due to in-group bias. This bias occurs when individuals favor their own group over others, leading to unfair treatment and misunderstandings. As a cadet, your mission is to identify and mitigate this bias to promote harmony.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Remember the last couple of units you completed? They were all about understanding cognitive biases. In-group bias is a specific type of bias where people prioritize their group, often at the expense of fairness or objectivity.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "One example of in-group bias is when a planetary leader allocates resources disproportionately to their own planet, ignoring the needs of others. This favoritism can lead to resentment and conflict among interplanetary alliances.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of in-group bias?",
"options": [
"A planetary council prioritizes funding for its own planet's projects over equally important interplanetary initiatives.",
"A leader allocates resources based on objective need, regardless of planetary affiliation.",
"A group of scientists collaborates with researchers from other planets to solve a galaxy-wide problem."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"In-group bias often involves favoritism toward one's own group, even when it is not justified."
]
},
{
"screen_type": "Text",
"text": "Another example of in-group bias is when individuals assume their group's values or perspectives are superior to others. This can lead to stereotyping and a lack of understanding between groups.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following statements reflects in-group bias?",
"options": [
"Our planet's culture is the most advanced in the galaxy, so other planets should adopt our ways.",
"Every planet has unique strengths and perspectives that contribute to the galaxy's diversity.",
"We should collaborate with other planets to learn from their experiences and improve together."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"In-group bias often involves assuming one's group is superior to others."
]
},
{
"screen_type": "Text",
"text": "The effects of in-group bias can be mitigated by fostering empathy and understanding between groups. Encouraging interplanetary dialogue and collaboration can help break down barriers and build trust.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following actions can help reduce in-group bias? Select all that apply.",
"options": [
"Encouraging interplanetary cultural exchanges.",
"Promoting shared goals and cooperation between groups.",
"Focusing only on the needs of one's own group.",
"Acknowledging and valuing the contributions of all groups.",
"Avoiding interactions with groups that are different from your own.",
"Educating individuals about the dangers of in-group bias."
],
"correct_indices": [0, 1, 3, 5],
"correct_text": null,
"hints": [
"Reducing in-group bias often involves promoting understanding and collaboration."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on in-group bias. Remember, your ability to recognize and address this bias is key to fostering unity and cooperation across the galaxy. The stars are counting on you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,118 @@
{
"unit": {
"Bias": "TheHaloEffect"
},
"title": "Galactic Logic Academy: Unveiling the Halo Effect",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected for a mission to uncover the mysteries of cognitive biases that influence decisions across the galaxy. Today, we'll focus on the halo effect—a bias that can distort perceptions and lead to flawed judgments. Ready to sharpen your reasoning skills? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The halo effect occurs when our overall impression of someone or something influences our judgment of their specific traits. For example, if a planetary leader is charismatic, we might assume they're also competent, even without evidence. This bias can lead to errors in decision-making.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The halo effect often arises from first impressions. A single positive trait, like physical appearance or eloquence, can overshadow other important factors. As a cadet, your mission is to identify and counteract this bias to make fair and logical decisions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Imagine a planetary leader who is admired for their bravery in battle. Due to the halo effect, citizens might assume they're also skilled in diplomacy, even if their track record suggests otherwise. This can lead to misplaced trust and poor outcomes.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the halo effect?",
"options": [
"Assuming a charismatic leader is also a competent economist.",
"Believing a quiet individual is less intelligent.",
"Judging a starship's performance based solely on its sleek design."
],
"correct_indices": [0, 2],
"correct_text": null,
"hints": [
"The halo effect involves letting one positive trait influence unrelated judgments."
]
},
{
"screen_type": "Text",
"text": "The halo effect can also work in reverse, where a negative trait overshadows positive ones. For instance, if a planetary leader is perceived as unkind, citizens might assume they're also incompetent, even if evidence suggests otherwise.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following scenarios demonstrate the halo effect? Select all that apply.",
"options": [
"Believing a well-dressed diplomat is more trustworthy.",
"Assuming a starship with a sleek design is faster than others.",
"Judging a scientist's intelligence based on their speaking style.",
"Assuming a leader with a poor public image is also ineffective in governance.",
"Believing a planet with beautiful landscapes must have advanced technology."
],
"correct_indices": [0, 1, 2, 3, 4],
"correct_text": null,
"hints": [
"The halo effect often involves assumptions based on unrelated traits."
]
},
{
"screen_type": "Text",
"text": "To counteract the halo effect, focus on objective evidence rather than subjective impressions. Evaluate each trait or decision independently, and be mindful of how first impressions might influence your judgment.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you avoid the halo effect when evaluating a planetary leader?",
"options": [
"Focus on their track record and evidence of competence.",
"Rely on first impressions and public opinion.",
"Assume their positive traits apply to all areas of leadership."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Avoid letting one trait influence your judgment of unrelated traits."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the halo effect. Remember, recognizing and countering cognitive biases like the halo effect is essential for making fair and logical decisions. The galaxy is counting on your clarity of thought!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,110 @@
{
"unit": {
"Fallacy": {
"FormalFallacy": "ProbabilisticFallacy"
}
},
"title": "Galactic Logic Academy: Mastering Probabilistic Reasoning",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! Your next mission is to explore the realm of probabilistic reasoning. Misunderstandings in probability can lead to flawed decisions, and your task is to identify and correct these errors to ensure the galaxy thrives. Ready to dive in? Let's go!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Probabilistic fallacies occur when reasoning about probabilities is flawed. These errors can lead to incorrect conclusions and misguided actions. As a cadet, your mission is to recognize and address these fallacies to maintain logical consistency.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "One common probabilistic fallacy is the gambler's fallacy. For example, a planetary leader might argue, 'Our planet has experienced five years of drought, so we are due for a year of rain.' This reasoning is flawed because past events do not influence independent probabilities.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the gambler's fallacy?",
"options": [
"A planetary leader believes that after five years of drought, rain is more likely this year.",
"A scientist argues that the probability of rain remains constant regardless of past weather.",
"A trader assumes that a coin flip will land heads because the last five flips were tails."
],
"correct_indices": [0, 2],
"correct_text": null,
"hints": [
"The gambler's fallacy assumes that past independent events affect future probabilities."
]
},
{
"screen_type": "Text",
"text": "Another probabilistic fallacy is base rate neglect. For instance, a leader might argue, 'A new technology has a 90% success rate, so it will definitely work for our planet,' without considering the base rate of success across similar planets.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following demonstrates base rate neglect?",
"options": [
"Ignoring the overall success rate of a technology when evaluating its effectiveness.",
"Assuming that a rare disease is common because a test has a high accuracy rate.",
"Considering both the base rate and the test accuracy when evaluating a diagnosis."
],
"correct_indices": [0, 1],
"correct_text": null,
"hints": [
"Base rate neglect occurs when the general probability of an event is ignored."
]
},
{
"screen_type": "Text",
"text": "The conjunction fallacy occurs when people assume that specific conditions are more probable than a single general one. For example, 'A planetary leader is more likely to be a scientist and a diplomat than just a scientist.' This reasoning is flawed.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the conjunction fallacy? Select all that apply.",
"options": [
"Believing that a planetary leader is more likely to be a scientist and a diplomat than just a scientist.",
"Assuming that a starship is more likely to be fast and durable than just fast.",
"Claiming that a planet is more likely to be habitable and resource-rich than just habitable.",
"Assuming that a rare event is more likely when combined with another rare event."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"The conjunction fallacy assumes that a combination of events is more likely than a single event."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on probabilistic fallacies. Your ability to identify these errors will help ensure logical decision-making across the galaxy. Keep up the great work!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,142 @@
{
"unit": {
"Fallacy": {
"FormalFallacy": "PropositionalFallacy"
}
},
"title": "Galactic Logic Academy: Rescuing the Galaxy Through Reason",
"screens": [
{
"screen_type": "Text",
"text": "Hey, Cadet! You've been chosen to embark on a mission to rescue the galaxy from chaos. As disputes arise between interplanetary leaders, your task is to use logic to settle conflicts and restore harmony. Ready to save the stars? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, misunderstandings often arise due to flawed reasoning. A propositional fallacy occurs when an argument's logical structure is flawed, leading to invalid conclusions. As a cadet, your mission is to identify and dismantle these fallacies.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Remember the last couple of units you completed? They were all about propositional logic. A propositional fallacy is simply a violation of one of those rules.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "One common propositional fallacy is affirming the consequent. Imagine a planetary leader claiming, 'If our planet has peace, then trade will flourish. Trade is flourishing, so we must have peace.' This reasoning is invalid and could lead to misguided decisions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of affirming the consequent?",
"options": [
"If the starship is repaired, it will fly. The starship is flying, so it must be repaired.",
"If the starship is repaired, it will fly. It is not repaired, so it will not fly.",
"If the starship is repaired, it will fly. It is repaired, so it will fly."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Affirming the consequent assumes that Q being true implies P is true."
]
},
{
"screen_type": "Text",
"text": "Another common fallacy is denying the antecedent. For example, a leader might argue, 'If our planet has peace, then trade will flourish. Our planet does not have peace, so trade cannot flourish.' This reasoning is also invalid.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of denying the antecedent?",
"options": [
"If the starship is repaired, it will fly. It is not repaired, so it will not fly.",
"If the starship is repaired, it will fly. The starship is flying, so it must be repaired.",
"If the starship is repaired, it will fly. It is repaired, so it will fly."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Denying the antecedent assumes that P being false implies Q is false."
]
},
{
"screen_type": "Text",
"text": "The fallacy of false equivalence occurs when two things are incorrectly treated as equivalent, despite significant differences. For instance, equating a minor planetary dispute to an intergalactic war could lead to disastrous decisions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of false equivalence? Select all that apply.",
"options": [
"Claiming that two planetary leaders are equally corrupt because both have been accused of wrongdoing.",
"Equating a minor trade disagreement to a full-scale planetary invasion.",
"Assuming that two scientific theories about space travel are equally valid because they are both theories.",
"Saying that a small asteroid and a large planet are equally dangerous because they both have mass.",
"Claiming that two spaceships are colorful because they are both bright red.",
"Claiming that two planets are equally habitable because they both have water.",
"Assuming that two different species are equally intelligent because they both have advanced technology."
],
"correct_indices": [0, 1, 2, 3, 5, 6],
"correct_text": null,
"hints": [
"False equivalence often ignores important differences between the compared items."
]
},
{
"screen_type": "Text",
"text": "The fallacy of begging the question occurs when an argument's conclusion is assumed in its premises. For example, 'The Galactic Council's decisions are just because the Galactic Council always makes just decisions.' This creates a circular argument.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Is this an example of begging the question? The Galactic Council is fair because it says so in its charter, and the charter is fair because the Galactic Council wrote it.",
"options": ["Yes", "No"],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for arguments where the conclusion is assumed in the premises."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on propositional fallacies. Remember, your ability to identify these fallacies is key to resolving disputes and maintaining peace across the galaxy. The stars are counting on you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,110 @@
{
"unit": {
"Fallacy": {
"FormalFallacy": "QuantificationalFallacy"
}
},
"title": "Galactic Logic Academy: Mastering Quantificational Fallacies",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy faces new challenges, and this time, your mission is to tackle quantificational fallacies. These errors in reasoning arise when quantifiers like 'all,' 'some,' or 'none' are misused. Ready to sharpen your logical skills? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Quantificational fallacies occur when arguments involving quantifiers fail to follow logical rules. Missteps in reasoning can lead to flawed conclusions, and as a cadet, your task is to identify and correct these errors.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "One common quantificational fallacy is the fallacy of illicit conversion. For example, a planetary leader might argue, 'All peaceful planets trade fairly. Therefore, all planets that trade fairly are peaceful.' This reasoning is invalid.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the fallacy of illicit conversion?",
"options": [
"All starships that are fast are well-maintained. Therefore, all well-maintained starships are fast.",
"Some planets with water are habitable. Therefore, some habitable planets have water.",
"No planets with toxic atmospheres are habitable. Therefore, no habitable planets have toxic atmospheres."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Illicit conversion occurs when the direction of a universal statement is reversed without justification."
]
},
{
"screen_type": "Text",
"text": "Another quantificational fallacy is the fallacy of existential instantiation. For instance, 'Some planets are habitable. Therefore, there exists a specific habitable planet named X.' This leap in reasoning is flawed.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the fallacy of existential instantiation?",
"options": [
"Some starships are fast. Therefore, there exists a specific starship named 'Speedster' that is fast.",
"All planets with water are habitable. Therefore, some habitable planets have water.",
"No planets with toxic atmospheres are habitable. Therefore, no habitable planets have toxic atmospheres."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Existential instantiation assumes the existence of a specific instance without evidence."
]
},
{
"screen_type": "Text",
"text": "The fallacy of undistributed middle occurs when the term shared by two premises is never distributed in either premise, leading to an invalid conclusion. For example: 'All starships are fast. All comets are fast. Therefore, all starships are comets.' The middle term 'fast' is never distributed, so the conclusion does not follow.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the fallacy of undistributed middle? Select all that apply.",
"options": [
"All planets with water are habitable. All ice moons are habitable. Therefore, all planets with water are ice moons.",
"All starships are fast. All comets are fast. Therefore, all starships are comets.",
"All leaders are wise. All wise beings are respected. Therefore, all leaders are respected.",
"All planets with life are unique. All nebulae are unique. Therefore, all planets with life are nebulae."
],
"correct_indices": [0, 1, 3],
"correct_text": null,
"hints": [
"The middle term must be distributed in at least one premise for the argument to be valid."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on quantificational fallacies. Your ability to identify these errors will help maintain peace and reason across the galaxy. The stars shine brighter with your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,110 @@
{
"unit": {
"Fallacy": {
"FormalFallacy": "SyllogisticFallacy"
}
},
"title": "Galactic Logic Academy: Mastering Syllogistic Reasoning",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! Your next mission is to explore the realm of syllogistic reasoning. Syllogistic fallacies occur when errors are made in categorical reasoning, leading to invalid conclusions. Ready to sharpen your logical skills? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Syllogistic reasoning involves arguments with categorical statements, such as 'All A are B' or 'Some A are not B.' A syllogistic fallacy arises when the logical structure of these arguments is flawed.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "One common syllogistic fallacy is the fallacy of the undistributed middle. For example: 'All planets with water are habitable. Earth is habitable. Therefore, Earth has water.' This reasoning is invalid because the middle term ('habitable') is not distributed.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the fallacy of the undistributed middle?",
"options": [
"All starships are fast. All fast things are quick. Therefore, all starships are quick.",
"All planets with water are habitable. Earth is habitable. Therefore, Earth has water.",
"All asteroids are rocky. All rocky things are solid. Therefore, all asteroids are solid."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"The fallacy of the undistributed middle occurs when the middle term is not distributed in at least one premise."
]
},
{
"screen_type": "Text",
"text": "Another syllogistic fallacy is illicit major or minor. For example: 'All planets are celestial bodies. No stars are planets. Therefore, no stars are celestial bodies.' This reasoning is invalid because it illicitly distributes the major term ('celestial bodies').",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of an illicit major term?",
"options": [
"All planets are celestial bodies. No stars are planets. Therefore, no stars are celestial bodies.",
"All asteroids are rocky. Some rocky things are planets. Therefore, all asteroids are planets.",
"All starships are fast. Some fast things are spaceships. Therefore, all starships are spaceships."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"An illicit major occurs when the major term is distributed in the conclusion but not in the premise."
]
},
{
"screen_type": "Text",
"text": "The fallacy of exclusive premises occurs when both premises in a syllogism are negative. For example: 'No planets are stars. No stars are asteroids. Therefore, no planets are asteroids.' This reasoning is invalid because it violates the rules of syllogistic logic.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the fallacy of exclusive premises? Select all that apply.",
"options": [
"No planets are stars. No stars are asteroids. Therefore, no planets are asteroids.",
"No asteroids are planets. No planets are stars. Therefore, no asteroids are stars.",
"No celestial bodies are asteroids. No asteroids are planets. Therefore, no celestial bodies are planets.",
"No planets are habitable. No habitable things are stars. Therefore, no planets are stars."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"Exclusive premises occur when both premises are negative, making it impossible to draw a valid conclusion."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on syllogistic fallacies. Your ability to identify these fallacies will help you navigate complex arguments and maintain peace across the galaxy. Keep up the great work!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,96 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "AdHominem"
}
},
"title": "Galactic Logic Academy: Defending the Stars with Reason",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! Your mission to bring logic and reason to the galaxy continues. This time, you'll learn about the ad hominem fallacy, a common error in reasoning that can derail interplanetary diplomacy. Ready to sharpen your skills? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, debates often become heated, and instead of addressing the argument, individuals attack the person making it. This is known as an ad hominem fallacy. It undermines productive dialogue and leads to unnecessary conflict.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "An ad hominem fallacy occurs when someone rejects or criticizes an argument by attacking the character, motive, or other attribute of the person making the argument, rather than addressing the substance of the argument itself.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader saying, 'We shouldn't listen to Ambassador Zorak's proposal for peace because Zorak comes from a planet known for its dishonesty.' This attack on Zorak's character does not address the merits of the proposal.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of an ad hominem fallacy?",
"options": [
"Rejecting a scientist's theory because they have been wrong in the past.",
"Disagreeing with a planetary leader's policy because it lacks evidence.",
"Criticizing a proposal because it contradicts established research."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Focus on whether the argument attacks the person rather than addressing the argument itself."
]
},
{
"screen_type": "Text",
"text": "Ad hominem fallacies can take many forms, such as attacking someone's appearance, background, or affiliations. These tactics distract from the real issues and prevent meaningful discussions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of ad hominem fallacies? Select all that apply.",
"options": [
"Rejecting a diplomat's argument because they come from a small planet.",
"Dismissing a scientist's findings because they have an unusual hairstyle.",
"Ignoring a leader's proposal because they are from a rival faction.",
"Criticizing a policy because it lacks logical reasoning.",
"Rejecting a theory because it contradicts existing evidence."
],
"correct_indices": [0, 1, 2],
"correct_text": null,
"hints": [
"Ad hominem fallacies focus on the person rather than the argument."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the ad hominem fallacy. Remember, addressing arguments with logic and reason is the key to fostering peace and understanding across the galaxy. Keep up the great work!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,85 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "AdIgnorantiam"
}
},
"title": "Galactic Logic Academy: Unveiling the Truth Beyond Ignorance",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! Today, we embark on a mission to uncover the truth behind the ad ignorantiam fallacy, also known as the appeal to ignorance. This fallacy arises when someone claims something is true simply because it hasn't been proven false, or vice versa. Ready to sharpen your reasoning skills? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, disputes often arise when leaders rely on flawed reasoning. The ad ignorantiam fallacy occurs when a lack of evidence is used as evidence itself. For example, 'No one has proven that aliens don't exist, so they must exist.' This reasoning is invalid and can lead to misguided decisions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Remember, Cadet, the absence of evidence is not evidence of absence. Just because something hasn't been proven false doesn't mean it's true, and vice versa. This is the core of the ad ignorantiam fallacy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the ad ignorantiam fallacy?",
"options": [
"No one has proven that intergalactic travel is impossible, so it must be possible.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"If our planet has peace, then trade will flourish. Trade is flourishing, so we must have peace."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for reasoning that relies on the lack of evidence as proof."
]
},
{
"screen_type": "Text",
"text": "The ad ignorantiam fallacy can also be used to dismiss claims. For example, 'No one has proven that this new technology works, so it must not work.' This reasoning is equally flawed.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following dismisses a claim using the ad ignorantiam fallacy?",
"options": [
"No one has proven that this new propulsion system works, so it must not work.",
"If the propulsion system is repaired, it will function. It is repaired, so it will function.",
"If the propulsion system is repaired, it will function. It is not repaired, so it will not function."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for reasoning that dismisses a claim due to lack of evidence."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the ad ignorantiam fallacy. Remember, sound reasoning requires evidence, not the absence of it. Your ability to identify this fallacy will help you bring clarity and truth to the galaxy. The stars are counting on you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,87 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "Bandwagon"
}
},
"title": "Galactic Logic Academy: Avoiding the Bandwagon Trap",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! In this unit, you'll learn about the bandwagon fallacy, a common reasoning error where people adopt a belief simply because others do. Your mission is to recognize and resist this fallacy to make sound, independent decisions. Ready to begin?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The bandwagon fallacy occurs when someone argues that a belief or action is correct simply because it is popular. For example, 'Everyone in the galaxy believes this, so it must be true.' Popularity does not guarantee truth.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders often fall into the bandwagon trap when making decisions. As a cadet, your role is to identify when this fallacy is influencing arguments and help others see the flaws in such reasoning.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the bandwagon fallacy?",
"options": [
"Everyone on the council agrees that this policy is the best, so it must be the right choice.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"The Galactic Council's decisions are just because the Galactic Council always makes just decisions."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The bandwagon fallacy relies on popularity as evidence for correctness."
]
},
{
"screen_type": "Text",
"text": "The bandwagon fallacy can lead to poor decisions, especially when the majority is misinformed. For example, 'Everyone believes this planet is uninhabitable, so it must be true,' might ignore critical evidence to the contrary.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the bandwagon fallacy? Select all that apply.",
"options": [
"Everyone in the galaxy uses this navigation system, so it must be the best.",
"This starship design is popular among engineers, so it must be the safest.",
"The majority of planetary leaders support this trade agreement, so it must be beneficial.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"Everyone believes this asteroid is harmless, so it must be safe to ignore."
],
"correct_indices": [0, 1, 2, 4],
"correct_text": null,
"hints": [
"Look for arguments that rely on popularity rather than evidence."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the bandwagon fallacy. Remember, just because an idea is popular doesn't mean it's correct. Stay vigilant and think critically to ensure peace and reason prevail across the galaxy!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,87 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "BeggingTheQuestion"
}
},
"title": "Galactic Logic Academy: Unraveling Circular Reasoning",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! Today, we dive into the fallacy of begging the question. This logical misstep occurs when an argument assumes its conclusion in its premises, creating a circular loop. Ready to sharpen your reasoning skills? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Begging the question often hides behind seemingly valid arguments. For example, 'The Galactic Council's decisions are just because the Galactic Council always makes just decisions.' Notice how the conclusion is assumed in the premise?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "This fallacy can lead to flawed reasoning and poor decision-making. As a cadet, your mission is to identify and dismantle these circular arguments to ensure logical clarity across the galaxy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of begging the question?",
"options": [
"The Galactic Council is fair because it says so in its charter, and the charter is fair because the Galactic Council wrote it.",
"If the starship is repaired, it will fly. The starship is flying, so it must be repaired.",
"The planet is habitable because it has water, and water is necessary for life."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for arguments where the conclusion is assumed in the premises."
]
},
{
"screen_type": "Text",
"text": "Begging the question can also appear in subtle forms. For instance, 'We know the Galactic Charter is perfect because it was written by the wisest beings in the galaxy, and we know they are wise because they wrote the Galactic Charter.'",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of begging the question? Select all that apply.",
"options": [
"The Galactic Council's decisions are just because the Galactic Council always makes just decisions.",
"The starship is the fastest because it is faster than all other starships.",
"The planetary shield is impenetrable because no one can penetrate it.",
"The Galactic Charter is perfect because it was written by the wisest beings, and they are wise because they wrote the charter.",
"The planet is habitable because it has water, and water is necessary for life."
],
"correct_indices": [0, 2, 3],
"correct_text": null,
"hints": [
"Focus on arguments where the conclusion is directly or indirectly assumed in the premises."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've mastered the art of identifying the fallacy of begging the question. Your ability to spot circular reasoning will help maintain logical integrity across the galaxy. Keep up the great work!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,111 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "HastyGeneralization"
}
},
"title": "Galactic Logic Academy: Avoiding Hasty Generalizations",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy needs your sharp reasoning skills once again. This time, your mission is to tackle a common informal fallacy: hasty generalization. Ready to dive in and bring clarity to the stars? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders often make decisions based on limited evidence. A hasty generalization occurs when someone draws a conclusion about a group or situation based on insufficient or biased data. This can lead to unfair or inaccurate judgments.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader saying, 'I met two traders from Planet X, and they were both dishonest. Therefore, all traders from Planet X must be dishonest.' This is a hasty generalization because the conclusion is based on a small and unrepresentative sample.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of a hasty generalization?",
"options": [
"I saw two meteors from Sector 7, and they were both red. Therefore, all meteors from Sector 7 must be red.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"If our planet has peace, then trade will flourish. Trade is flourishing, so we must have peace."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Hasty generalizations often involve conclusions based on small or biased samples."
]
},
{
"screen_type": "Text",
"text": "Hasty generalizations can also occur when someone assumes that a single event represents a trend. For instance, 'The first alien we met from Planet Y was hostile, so all aliens from Planet Y must be hostile.' This reasoning is flawed and can lead to unnecessary conflict.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of hasty generalizations? Select all that apply.",
"options": [
"I visited one city on Planet Z, and it was polluted. Therefore, the entire planet must be polluted.",
"I met three scientists from Sector 5, and they were all brilliant. Therefore, all scientists from Sector 5 must be brilliant.",
"I saw one spaceship from the Andromeda system, and it was fast. Therefore, all spaceships from Andromeda must be fast.",
"I heard one song from a band, and it was great. Therefore, all their songs must be great.",
"I tried one dish from a galactic cuisine, and it was spicy. Therefore, all dishes from that cuisine must be spicy."
],
"correct_indices": [0, 1, 2, 3, 4],
"correct_text": null,
"hints": [
"Look for conclusions drawn from limited or unrepresentative evidence."
]
},
{
"screen_type": "Text",
"text": "To avoid hasty generalizations, always seek more evidence and consider whether your sample is representative. Remember, Cadet, careful reasoning is the key to maintaining peace and understanding across the galaxy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you avoid making a hasty generalization?",
"options": [
"Gather more evidence and ensure your sample is representative.",
"Make conclusions quickly to save time.",
"Rely on your first impression of a situation."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Avoiding hasty generalizations requires careful consideration of evidence."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on hasty generalizations. Your ability to recognize and avoid this fallacy will help you make fair and accurate decisions. The galaxy is counting on your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,87 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "NoTrueScotsman"
}
},
"title": "Galactic Logic Academy: The No True Scotsman Fallacy",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! Today, we dive into another logical fallacy that often arises in debates across the galaxy: the No True Scotsman fallacy. Your mission is to understand and identify this fallacy to ensure fair and logical reasoning in interplanetary discussions. Ready to sharpen your skills? Let's go!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The No True Scotsman fallacy occurs when someone dismisses counterexamples to a generalization by redefining the criteria to exclude them. This fallacy often appears in arguments about identity, behavior, or membership in a group.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader claiming, 'No true citizen of Planet X would ever betray their people.' When presented with evidence of a citizen who did, they respond, 'Well, they aren't a true citizen of Planet X.' This is the No True Scotsman fallacy in action.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the No True Scotsman fallacy?",
"options": [
"No true scientist would ever question established theories. When a scientist does, they are dismissed as not being a true scientist.",
"If the starship is repaired, it will fly. The starship is flying, so it must be repaired.",
"If our planet has peace, then trade will flourish. Trade is flourishing, so we must have peace."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for arguments where counterexamples are dismissed by redefining the group."
]
},
{
"screen_type": "Text",
"text": "The No True Scotsman fallacy can undermine productive discussions by dismissing valid counterexamples. Recognizing this fallacy is key to fostering logical and inclusive debates.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the No True Scotsman fallacy? Select all that apply.",
"options": [
"No true explorer would fear the unknown. When an explorer expresses fear, they are dismissed as not being a true explorer.",
"No true artist would create digital art. When a digital artist is mentioned, they are dismissed as not being a true artist.",
"No true leader would ever make a mistake. When a leader makes a mistake, they are dismissed as not being a true leader.",
"No true scientist would ever question established theories. When a scientist does, they are dismissed as not being a true scientist.",
"No true starship engineer would use outdated technology. When one does, they are dismissed as not being a true starship engineer."
],
"correct_indices": [0, 1, 2, 3, 4],
"correct_text": null,
"hints": [
"The fallacy involves redefining group membership to exclude counterexamples."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the No True Scotsman fallacy. Remember, logical reasoning is essential for maintaining peace and understanding across the galaxy. Keep up the great work!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,116 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "OverGeneralization"
}
},
"title": "Galactic Logic Academy: Guarding the Stars Against Overgeneralization",
"screens": [
{
"screen_type": "Text",
"text": "Greetings, Cadet! The galaxy faces a new challenge: overgeneralization. This fallacy arises when conclusions are drawn from insufficient or biased evidence. Your mission is to identify and counteract overgeneralizations to ensure fair and logical decision-making across the stars. Ready to take on this challenge? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Overgeneralization occurs when a conclusion is made about a group or situation based on limited or unrepresentative evidence. For example, assuming all planets in a star system are hostile because one planet attacked a trade ship is an overgeneralization.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Overgeneralizations can lead to misunderstandings and conflict. As a cadet, your task is to identify these flawed arguments and ensure decisions are based on sound reasoning and sufficient evidence.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Consider this example: 'The last two starships from Planet X were poorly constructed. Therefore, all starships from Planet X must be poorly constructed.' This is an overgeneralization because it assumes a pattern based on limited evidence.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of overgeneralization?",
"options": [
"The last two starships from Planet X were poorly constructed. Therefore, all starships from Planet X must be poorly constructed.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"The Galactic Council always makes fair decisions because it is composed of wise leaders."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Overgeneralization involves making a broad claim based on limited evidence."
]
},
{
"screen_type": "Text",
"text": "Another example of overgeneralization: 'Two traders from Planet Y were dishonest, so all traders from Planet Y must be dishonest.' This unfairly labels an entire group based on the actions of a few.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of overgeneralization? Select all that apply.",
"options": [
"All planets in the Zeta system are hostile because one planet attacked a trade ship.",
"Two traders from Planet Y were dishonest, so all traders from Planet Y must be dishonest.",
"The last two starships from Planet X were poorly constructed. Therefore, all starships from Planet X must be poorly constructed.",
"The Galactic Council always makes fair decisions because it is composed of wise leaders.",
"If the starship is repaired, it will fly. It is repaired, so it will fly."
],
"correct_indices": [0, 1, 2],
"correct_text": null,
"hints": [
"Look for claims that generalize based on limited or biased evidence."
]
},
{
"screen_type": "Text",
"text": "Overgeneralizations can lead to prejudice and poor decision-making. By identifying and challenging these fallacies, you help ensure fairness and logic prevail across the galaxy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Is this an example of overgeneralization? 'The last two ambassadors from Planet Z were rude, so all ambassadors from Planet Z must be rude.'",
"options": ["Yes", "No"],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Does the conclusion rely on limited evidence to make a broad claim?"
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on overgeneralization. Your ability to identify and counteract this fallacy is vital for maintaining peace and fairness across the galaxy. The stars shine brighter with your efforts!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,107 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "PostHocErgoPropterHoc"
}
},
"title": "Galactic Logic Academy: Unraveling the Post Hoc Fallacy",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! Your mission to rescue the galaxy continues. This time, you'll tackle a common informal fallacy that often leads to faulty reasoning: post hoc ergo propter hoc. Ready to sharpen your logic and save the stars? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders sometimes make decisions based on flawed assumptions. The post hoc fallacy occurs when someone assumes that because one event follows another, the first event must have caused the second. This can lead to misguided conclusions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader claiming, 'Our planet's economy improved after we adopted a new flag design. Therefore, the new flag caused the economic growth.' This reasoning is flawed and overlooks other possible factors.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the post hoc fallacy?",
"options": [
"The starship's engines failed after we installed new software. Therefore, the software caused the failure.",
"The starship's engines failed because they were not maintained properly.",
"The starship's engines failed after a meteor shower damaged the hull."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Post hoc reasoning assumes causation based solely on the order of events."
]
},
{
"screen_type": "Text",
"text": "The post hoc fallacy can also appear in more subtle forms. For instance, a leader might argue, 'Our planet's weather improved after we banned interstellar travel. Therefore, the ban caused the better weather.' This ignores other potential explanations.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the post hoc fallacy? Select all that apply.",
"options": [
"A planet's crime rate dropped after a new anthem was introduced. Therefore, the anthem caused the drop in crime.",
"A starship's fuel efficiency improved after its hull was painted. Therefore, the paint caused the improvement.",
"A planetary leader's popularity increased after they started wearing a new uniform. Therefore, the uniform caused the popularity boost.",
"A planet's crops flourished after a comet passed nearby. Therefore, the comet caused the flourishing crops.",
"A starship's navigation system failed after a solar flare. Therefore, the solar flare caused the failure."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"Look for examples where causation is assumed without evidence beyond the sequence of events."
]
},
{
"screen_type": "Text",
"text": "To avoid the post hoc fallacy, always consider alternative explanations and look for evidence of a causal relationship. Correlation does not imply causation, Cadet!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Is this an example of the post hoc fallacy? A planet's economy improved after a new trade agreement was signed. Therefore, the trade agreement caused the improvement.",
"options": ["Yes", "No"],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Does the argument assume causation based solely on the sequence of events?"
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the post hoc fallacy. Remember, your ability to identify and challenge flawed reasoning is vital to maintaining peace and harmony across the galaxy. The stars are counting on you!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,107 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "QuotingOutOfContext"
}
},
"title": "Galactic Logic Academy: The Perils of Quoting Out of Context",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! Your next mission is to tackle a subtle yet dangerous fallacy: quoting out of context. Misusing someone's words can lead to misunderstandings and conflict. Let's explore how to identify and avoid this fallacy. Ready? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Quoting out of context occurs when a statement is taken out of its original setting, altering its intended meaning. This can mislead others and distort the truth. As a cadet, your mission is to recognize and address this fallacy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Imagine a planetary leader saying, 'We must prepare for peace, not war.' If someone quotes only 'We must prepare for war,' it completely changes the meaning. This is an example of quoting out of context.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
    "question": "Which of the following are examples of quoting out of context?",
"options": [
"A scientist says, 'This theory is promising but unproven,' and someone quotes, 'This theory is promising.'",
"A leader says, 'We must act now to save our planet,' and someone quotes the entire statement.",
"A historian says, 'The war was devastating, but it led to peace,' and someone quotes, 'The war was devastating.'"
],
"correct_indices": [0, 2],
"correct_text": null,
"hints": [
"Look for cases where the quoted text changes the original meaning."
]
},
{
"screen_type": "Text",
"text": "Quoting out of context can be used to manipulate opinions or spread misinformation. For example, quoting only part of a speech to make it seem like the speaker supports something they actually oppose.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of quoting out of context? Select all that apply.",
"options": [
"A diplomat says, 'We must consider all options, including negotiation,' and someone quotes, 'We must consider all options.'",
"A scientist says, 'This experiment failed, but it taught us valuable lessons,' and someone quotes, 'This experiment failed.'",
"A leader says, 'Our planet is facing challenges, but we are resilient,' and someone quotes, 'Our planet is facing challenges.'",
"A historian says, 'The treaty was controversial, but it brought peace,' and someone quotes, 'The treaty was controversial.'",
"A teacher says, 'Hard work and honesty lead to success,' and someone quotes the entire statement."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"Focus on examples where the quoted text omits key context."
]
},
{
"screen_type": "Text",
"text": "To avoid quoting out of context, always consider the full context of a statement. Ask yourself: What was the speaker's intent? What is the broader message? This will help you maintain integrity in communication.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Is this an example of quoting out of context? A leader says, 'We must prepare for peace, not war,' and someone quotes, 'We must prepare for war.'",
"options": ["Yes", "No"],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Does the quoted text change the original meaning?"
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on quoting out of context. Remember, accurate representation of others' words is essential for fostering trust and understanding. The galaxy is counting on your integrity!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,110 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "SlipperySlope"
}
},
"title": "Galactic Logic Academy: Navigating the Slippery Slope",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy needs your sharp reasoning skills once again. This time, your mission is to tackle the slippery slope fallacy—a common error in reasoning that can lead to unnecessary fear and poor decision-making. Ready to dive in? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders often face complex decisions. The slippery slope fallacy occurs when someone argues that a relatively small action will inevitably lead to a chain of significant and undesirable events, without sufficient evidence to support such a claim.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader saying, 'If we allow one small asteroid mining operation, soon the entire planet will be overrun by mining corporations, and our ecosystem will be destroyed.' This reasoning assumes a catastrophic chain of events without evidence.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of a slippery slope fallacy?",
"options": [
"If we allow one spaceship to land, soon the entire galaxy will be colonized by aliens.",
"If we repair the starship, it will fly again.",
"If we don't repair the starship, it will remain grounded."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for arguments that predict extreme outcomes without sufficient evidence."
]
},
{
"screen_type": "Text",
"text": "The slippery slope fallacy often relies on fear and exaggeration. While it's important to consider potential consequences, it's equally important to evaluate whether the chain of events is truly inevitable or supported by evidence.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following statements are examples of slippery slope reasoning? Select all that apply.",
"options": [
"If we allow one planet to join the Galactic Council, soon every planet will demand membership, and the Council will collapse.",
"If we allow trade with one alien species, it will lead to intergalactic economic chaos.",
"If we allow peaceful negotiations, it will lead to a lasting peace.",
"If we allow one asteroid mining operation, it will lead to the destruction of our planet's ecosystem."
],
"correct_indices": [0, 1, 3],
"correct_text": null,
"hints": [
"Slippery slope arguments often predict extreme outcomes without evidence."
]
},
{
"screen_type": "Text",
"text": "To avoid falling into the slippery slope fallacy, always ask: Is there evidence to support the claim that one action will lead to a chain of events? Are there other factors that could prevent the predicted outcome?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you identify a slippery slope fallacy?",
"options": [
"Look for arguments that predict extreme outcomes without evidence.",
"Look for arguments that rely on circular reasoning.",
"Look for arguments that compare two unrelated things."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Slippery slope arguments often lack evidence for the predicted chain of events."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've successfully completed the Galactic Logic Academy's unit on the slippery slope fallacy. Remember, your ability to recognize and challenge flawed reasoning is essential for maintaining peace and order in the galaxy. The stars shine brighter with your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,87 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "SpecialPleading"
}
},
"title": "Galactic Logic Academy: Uncovering the Truth Behind Special Pleading",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy needs your sharp reasoning skills once again. This time, your mission is to uncover and dismantle the fallacy of special pleading. Ready to bring clarity to the cosmos? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Special pleading occurs when someone applies rules or standards to others while exempting themselves or their argument without valid justification. This fallacy often hides behind emotional appeals or vague exceptions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Imagine a planetary leader arguing, 'All planets must contribute resources to the Galactic Defense Fund, but our planet is an exception because we are special.' Without a valid reason, this is an example of special pleading.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
    "question": "Which of the following are examples of special pleading?",
"options": [
"All starships must follow the speed limit, but our starship is an exception because we are on an important mission.",
"If the starship is repaired, it will fly. It is repaired, so it will fly.",
"All planets must contribute resources, but our planet is exempt because we are smaller than others."
],
"correct_indices": [0, 2],
"correct_text": null,
"hints": [
"Look for arguments where exceptions are made without valid justification."
]
},
{
"screen_type": "Text",
"text": "Special pleading can also occur when someone dismisses evidence or rules that contradict their argument. For example, 'Our planet's leader cannot be corrupt because they are a good person.' This avoids addressing the evidence directly.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of special pleading? Select all that apply.",
"options": [
"Claiming that a planetary law applies to all citizens except the royal family because they are 'above the law.'",
"Arguing that a starship's safety regulations don't apply to a specific crew because they are 'experienced pilots.'",
"Saying that a planetary leader's actions cannot be criticized because they have 'good intentions.'",
"Assuming that a planet's environmental policies don't need to be followed because they are 'too small to make a difference.'",
"Claiming that a scientific theory is invalid because it contradicts personal beliefs."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"Special pleading often involves creating exceptions without valid reasoning."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on special pleading. Remember, identifying and addressing this fallacy is crucial for ensuring fairness and consistency across the galaxy. The stars are brighter because of your efforts!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,111 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "StrawMan"
}
},
"title": "Galactic Logic Academy: Defending the Galaxy from Misrepresentation",
"screens": [
{
"screen_type": "Text",
"text": "Greetings, Cadet! The galaxy is under threat from a new kind of chaos: misrepresentation. Your mission is to learn about the Straw Man Fallacy, a deceptive tactic that distorts arguments and derails discussions. Ready to defend the stars with reason? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, leaders sometimes misrepresent their opponents' arguments to make them easier to attack. This is known as the Straw Man Fallacy. By distorting the original argument, they create a 'straw man'—a weaker version that is easier to defeat.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader proposing to allocate more resources to education. An opponent might respond, 'So you're saying we should abandon our planetary defenses and leave ourselves vulnerable to attack?' This misrepresents the original argument, creating a straw man.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of a Straw Man Fallacy?",
"options": [
"A leader proposes reducing energy consumption, and an opponent responds, 'So you want us to live in the dark ages without any technology?'",
"A leader argues for increased trade, and an opponent responds, 'So you're saying we should ignore our own planet's needs entirely?'",
"A leader suggests improving healthcare, and an opponent responds, 'So you want to bankrupt the entire galaxy?'"
],
"correct_indices": [0, 1, 2],
"correct_text": null,
"hints": [
"Look for responses that distort the original argument into something extreme or unrelated."
]
},
{
"screen_type": "Text",
"text": "The Straw Man Fallacy is dangerous because it shifts the focus away from the real issue. Instead of addressing the actual argument, it attacks a distorted version, leading to misunderstandings and conflict.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the Straw Man Fallacy? Select all that apply.",
"options": [
"A leader suggests exploring renewable energy, and an opponent responds, 'So you want to shut down all traditional energy sources immediately?'",
"A leader proposes stricter environmental regulations, and an opponent responds, 'So you want to destroy our economy?'",
"A leader advocates for peace talks, and an opponent responds, 'So you want to surrender to our enemies?'",
"A leader suggests increasing taxes on luxury goods, and an opponent responds, 'So you want to tax everyone into poverty?'",
"A leader argues for better public transportation, and an opponent responds, 'So you want to ban all personal starships?'"
],
"correct_indices": [0, 1, 2, 3, 4],
"correct_text": null,
"hints": [
"Straw Man Fallacies often exaggerate or misrepresent the original argument to make it easier to attack."
]
},
{
"screen_type": "Text",
"text": "To counter the Straw Man Fallacy, always clarify the original argument. Ask questions, seek understanding, and ensure that discussions remain focused on the real issues. This is how you can maintain peace and reason in the galaxy.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you effectively counter a Straw Man Fallacy?",
"options": [
"Clarify the original argument and restate it accurately.",
"Ignore the misrepresentation and move on.",
"Attack the person making the misrepresentation."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Focus on understanding and addressing the real argument."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the Straw Man Fallacy. By identifying and countering this fallacy, you are helping to ensure that discussions remain fair and productive. The galaxy is safer thanks to your efforts!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,110 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "TexasSharpshooter"
}
},
"title": "Galactic Logic Academy: The Texas Sharpshooter Fallacy",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy needs your sharp reasoning skills once again. This time, your mission is to uncover and dismantle the Texas Sharpshooter Fallacy—a deceptive reasoning pattern that can mislead even the wisest interplanetary leaders. Ready to aim for the truth? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The Texas Sharpshooter Fallacy occurs when someone cherry-picks data to fit a specific conclusion, ignoring data that doesn't support it. It's like a sharpshooter firing at a barn and then painting a target around the bullet holes to make it look like they hit the bullseye.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In the galaxy, this fallacy can lead to dangerous decisions. For example, a planetary leader might claim their economic policy is a success by highlighting a few thriving cities while ignoring widespread poverty elsewhere. Your task is to spot and challenge such flawed reasoning.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the Texas Sharpshooter Fallacy?",
"options": [
"A scientist claims their theory is correct because they found a few data points that support it, ignoring the majority of data that contradicts it.",
"A leader argues that their planet is peaceful because there have been no wars in the last year, despite ongoing civil unrest.",
"A trader claims their business is booming because one product is selling well, while ignoring losses in other areas."
],
"correct_indices": [0, 2],
"correct_text": null,
"hints": [
"Look for cases where data is selectively used to support a conclusion."
]
},
{
"screen_type": "Text",
"text": "The Texas Sharpshooter Fallacy often arises when people confuse correlation with causation. For example, a planetary leader might argue that their planet's prosperity is due to their policies, simply because the economy improved during their term, without considering other factors.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of confusing correlation with causation? Select all that apply.",
"options": [
"Claiming that a planet's increased trade is due to a new treaty, without considering other economic factors.",
"Arguing that a starship's improved performance is due to a new paint job, ignoring mechanical upgrades.",
"Assuming that a planet's peace is due to a new leader, without examining other contributing factors.",
"Believing that a species' intelligence is due to their diet, without studying other influences."
],
"correct_indices": [0, 2, 3],
"correct_text": null,
"hints": [
"Correlation does not always imply causation. Look for missing context or ignored variables."
]
},
{
"screen_type": "Text",
"text": "To avoid the Texas Sharpshooter Fallacy, always consider the full range of data and avoid cherry-picking. Ask yourself: Are there other explanations? Is the data being selectively presented to fit a narrative?",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you avoid falling into the Texas Sharpshooter Fallacy?",
"options": [
"Consider all available data, not just the data that supports your conclusion.",
"Focus only on the data that aligns with your argument.",
"Ignore contradictory evidence to strengthen your case."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Avoid selective reasoning and ensure your conclusions are based on comprehensive evidence."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the Galactic Logic Academy's unit on the Texas Sharpshooter Fallacy. Remember, your ability to identify and challenge this fallacy is crucial for maintaining truth and justice across the galaxy. The stars shine brighter with your wisdom!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,111 @@
{
"unit": {
"Fallacy": {
"InformalFallacy": "TuQuoque"
}
},
"title": "Galactic Logic Academy: Deflecting the Tu Quoque Trap",
"screens": [
{
"screen_type": "Text",
"text": "Welcome back, Cadet! The galaxy faces a new challenge: leaders are deflecting valid criticisms by pointing out the flaws of others. This tactic, known as the tu quoque fallacy, undermines reasoned debate. Your mission is to identify and counter this fallacy to restore productive dialogue. Ready to take on the challenge? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The tu quoque fallacy, also known as the 'you too' fallacy, occurs when someone responds to criticism by accusing their critic of the same or similar behavior, rather than addressing the argument itself. This deflection distracts from the issue at hand and prevents meaningful resolution.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, imagine a planetary leader being criticized for polluting their planet. Instead of addressing the criticism, they respond, 'Well, your planet pollutes too!' This response does not justify their actions or address the original concern.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is an example of the tu quoque fallacy?",
"options": [
"A leader responds to criticism about corruption by saying, 'You're corrupt too!'",
"A leader explains their actions by providing evidence and reasoning.",
"A leader acknowledges the criticism and promises to improve."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Look for responses that deflect criticism by accusing the critic of similar behavior."
]
},
{
"screen_type": "Text",
"text": "The tu quoque fallacy is problematic because it shifts the focus away from the original argument. Even if the critic is guilty of similar behavior, it does not invalidate their criticism or justify the actions being criticized.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are examples of the tu quoque fallacy? Select all that apply.",
"options": [
"A planetary leader responds to criticism about their environmental policies by saying, 'Your planet has environmental issues too!'",
"A leader accused of dishonesty replies, 'Well, you've lied before!'",
"A leader criticized for high taxes responds, 'Other planets have higher taxes!'",
"A leader accused of unfair trade practices says, 'Your planet's trade practices are worse!'",
"A leader responds to criticism by providing evidence that their policies are effective."
],
"correct_indices": [0, 1, 2, 3],
"correct_text": null,
"hints": [
"Focus on responses that deflect criticism by pointing out flaws in the critic."
]
},
{
"screen_type": "Text",
"text": "To counter the tu quoque fallacy, focus on the original argument. Acknowledge any valid points made by the critic, but avoid deflecting by pointing out their flaws. This approach fosters constructive dialogue and helps resolve disputes.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "How can you effectively counter the tu quoque fallacy?",
"options": [
"Focus on the original argument and avoid deflecting.",
"Respond by pointing out the critic's flaws.",
"Ignore the criticism entirely."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The goal is to address the argument, not deflect or ignore it."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've successfully completed the Galactic Logic Academy's unit on the tu quoque fallacy. By identifying and countering this fallacy, you are helping to promote reasoned debate and restore harmony across the galaxy. The stars shine brighter because of your efforts!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

140
data/introduction.json Normal file
View File

@ -0,0 +1,140 @@
{
"unit": "Introduction",
"title": "Welcome to the Galactic Academy",
"screens": [
{
"screen_type": "Text",
"text": "Greetings, Cadet! Welcome to the Galactic Academy. As you know, our galaxy is in grave danger. We need brave souls like you to help us navigate the cosmos and communicate with alien species. Your training begins now!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "We have been stuck in space for a long time, and we need to learn how to communicate with aliens. You see, they are capable of extremely precise logical thinking, and we need intergalactic diplomats who can reason with them. This is where you come in!.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Our spaceships are equipped with the latest technology, including warp drives and advanced communication systems. Before we set you loose on the galaxy, we need to ensure you understand the basics of communication.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "This advanced technology will save your progress as you learn, so you can pick up right where you left off. Leave at any time by pressing CTRL+C, or by using the menu option at the end of each unit.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "We have built a translator that can help you communicate with aliens. It is important to remember that while the translator is powerful, it is not infallible. You must learn to think critically and adapt your communication style to different species.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Sometimes, the translator will give you a few options that it thinks you will want to choose from. In the next question, choose the option that you think is the best.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Greetings, human! What is the answer to the ultimate question of life, the universe, and everything?\n(You can get a hint if you need it)",
"options": [
"42",
"To boldly go where no one has gone before",
"To explore the mysteries of the universe"
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"It's a number that has puzzled philosophers and scientists alike.",
"It was popularized by a famous science fiction series."
]
},
{
"screen_type": "Text",
"text": "I see you're mastering the skills of intergalactic communication! Remember, the key to successful diplomacy is understanding and respecting the culture of the species you are communicating with. To that end, some questions have more than one right answer.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are key principles of alien diplomacy? Select all that apply.",
"options": [
"Always speak louder to assert dominance",
"Respect cultural differences",
"Offer Earth artifacts as gifts without consent",
"Communicate clearly and effectively"
],
"correct_indices": [1,3],
"correct_text": null,
"hints": [
"Understanding and respecting cultural differences is crucial.",
"Effective communication is about clarity and respect."
]
},
{
"screen_type": "Text",
"text": "Not bad, Cadet! You've shown a good understanding of the principles of alien diplomacy. Now, let's move on to some practical exercises. Remember, the key to successful communication is to be adaptable and open-minded. Sometimes, the translator won't give you any options, so you'll have to type it in yourself.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "FreeResponse",
"text": "",
"question": "How many fingers does the 9-Fingered Space Badger have?",
"options": null,
"correct_indices": null,
"correct_text": ["9", "nine"],
"hints": [
"Its name is the 9-Fingered Space Badger."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the training. Prepare for your first mission: charting a course to Proxima Centauri!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": " ^\n / \\\n / \\\n / \\\n /_______\\\n | |\n | | |\n | | |\n | | |\n /_______\\\n | |\n |_________|\n / \\\n | |\n |_______|",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,160 @@
{
"unit": {
"Logic": "BooleanAlgebra"
},
"title": "Mastering Boolean Algebra",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've been selected to join the Galactic Programming Fleet's advanced training program. Your mission: master the principles of Boolean algebra to solve complex logical problems and optimize your code. Ready to elevate your skills? Let's begin!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Boolean algebra is a branch of algebra that deals with true and false values. It provides a framework for simplifying logical expressions and is widely used in programming, circuit design, and more.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "In Boolean algebra, we use operations like AND, OR, and NOT, but we also work with laws and properties such as the commutative, associative, distributive, identity, and complement laws. These help us simplify and analyze logical expressions.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "For example, the commutative law states that the order of operands does not matter for AND and OR operations. A AND B is the same as B AND A, and A OR B is the same as B OR A.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is true according to the commutative law?",
"options": [
"A AND B is the same as B AND A.",
"A OR B is the same as B OR A.",
"Both of the above."
],
"correct_indices": [2],
"correct_text": null,
"hints": [
"The commutative law applies to both AND and OR operations."
]
},
{
"screen_type": "Text",
"text": "The associative law states that the grouping of operands does not affect the result for AND and OR operations. For example, (A AND B) AND C is the same as A AND (B AND C).",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following expressions is equivalent to (A OR B) OR C according to the associative law?",
"options": [
"A OR (B OR C)",
"(A AND B) OR C",
"A AND (B AND C)"
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"The associative law applies to OR operations."
]
},
{
"screen_type": "Text",
"text": "The distributive law allows us to distribute AND over OR and vice versa. For example, A AND (B OR C) is equivalent to (A AND B) OR (A AND C).",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are valid applications of the distributive law? Select all that apply.",
"options": [
"A AND (B OR C) = (A AND B) OR (A AND C)",
"A OR (B AND C) = (A OR B) AND (A OR C)",
"A AND (B AND C) = (A AND B) AND C"
],
"correct_indices": [0, 1],
"correct_text": null,
"hints": [
"The distributive law applies to both AND over OR and OR over AND."
]
},
{
"screen_type": "Text",
"text": "The identity law states that A AND true is A, and A OR false is A. These identities help simplify expressions by removing redundant terms.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "FreeResponse",
"text": "",
"question": "What does A OR false simplify to according to the identity law?",
"options": null,
"correct_indices": null,
"correct_text": ["A", "a"],
"hints": [
"The identity law states that A OR false is A."
]
},
{
"screen_type": "Text",
"text": "The complement law states that A AND NOT A is false, and A OR NOT A is true. These laws are fundamental in Boolean algebra.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "What does A AND NOT A evaluate to according to the complement law?",
"options": [
"True",
"False",
"A"
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"The complement law states that A AND NOT A is always false."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the course on Boolean algebra. Remember, mastering these principles will help you simplify logical expressions and write efficient code. Keep practicing and applying these concepts in your projects!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

View File

@ -0,0 +1,205 @@
{
"unit": {
"Logic": "LogicalOperations"
},
"title": "Mastering Logical Operations",
"screens": [
{
"screen_type": "Text",
"text": "Welcome, Cadet! You've just been recruited to the Galactic Programming Fleet. Your mission: master the art of logical operations to navigate through complex coding challenges and save the galaxy from bad logic. Ready to embark on this adventure? Let's dive in!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "Logical operations are used to make logical decisions. They allow us to compare values and decide what action to take next. In this course, you'll learn about AND, OR, and NOT operations.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The president of Planet Zog has to keep her population safe and happy. She has a lot of decisions to make, and she needs to use logical operations to help her. For example, if the weather is nice AND the streets are clean, people can go to work. If the weather is NOT nice OR the streets are covered in slime, everyone should stay home.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "There are two logical operators in this rule. The first is AND, which means both conditions must be true for the result to be true. The second is OR, which means at least one condition must be true for the result to be true.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The AND operation evaluates to `true` if both conditions are true. For example, if condition A and condition B are both true, the result is true. Otherwise, it's false.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The OR operation evaluates to `true` if at least one condition is true. For example, if condition A or condition B is true, the result is true. If both are false, the result is false.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Text",
"text": "The NOT operation inverts a condition. If a condition is true, NOT makes it false, and vice versa. It's a simple but powerful tool in logical reasoning.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "Which of the following is true for the AND operation?",
"options": [
"It evaluates to true if both conditions are true.",
"It evaluates to true if at least one condition is true.",
"It inverts the condition."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Both conditions must be true for the result to be true."
]
},
{
"screen_type": "Text",
"text": "Now let's test your understanding of the OR operation. Remember, it evaluates to true if at least one condition is true.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Checkboxes",
"text": "",
"question": "Which of the following are true for the OR operation? Select all that apply.",
"options": [
"It evaluates to true only if both conditions are true.",
"It evaluates to true if at least one condition is true.",
"It evaluates to false only if both conditions are false."
],
"correct_indices": [1, 2],
"correct_text": null,
"hints": [
"OR is inclusive, meaning it evaluates to true if either or both conditions are true.",
"It only evaluates to false when both conditions are false."
]
},
{
"screen_type": "Text",
"text": "You're doing great! Now let's move on to the NOT operation. Remember, it inverts the condition.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "FreeResponse",
"text": "",
"question": "If a condition is true, what does the NOT operation evaluate to?",
"options": null,
"correct_indices": null,
"correct_text": ["false", "f"],
"hints": [
"NOT inverts the condition. If it's true, it becomes false."
]
},
{
"screen_type": "Text",
"text": "Let's try some more realistic examples. A programmer on Planet Zog needs to write a program that checks if the weather is nice and the streets are clean. If both conditions are true, the program should allow people to go to work.",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
},
{
"screen_type": "Mcq",
"text": "",
"question": "If the streets are clean AND the weather is NOT nice, what should the program do?",
"options": [
"Allow people to go to work.",
"Keep people at home."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Both conditions must be true for the result to be true."
]
},
{
"screen_type": "Mcq",
"text": "",
"question": "If the streets are covered in slime AND the weather is NOT nice, what should the program do?",
"options": [
"Allow people to go to work.",
"Keep people at home."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Both conditions must be true for the result to be true."
]
},
{
"screen_type": "Mcq",
"text": "",
"question": "If the streets are covered in slime OR the weather is NOT nice, what should the program do?",
"options": [
"Allow people to go to work.",
"Keep people at home."
],
"correct_indices": [1],
"correct_text": null,
"hints": [
"Both conditions must be true for the result to be true."
]
},
{
"screen_type": "Mcq",
"text": "",
"question": "If the streets are clean AND the weather is nice, what should the program do?",
"options": [
"Allow people to go to work.",
"Keep people at home."
],
"correct_indices": [0],
"correct_text": null,
"hints": [
"Both conditions must be true for the result to be true."
]
},
{
"screen_type": "Text",
"text": "Congratulations, Cadet! You've completed the course on logical operations. Remember, mastering these concepts is key to becoming a great programmer. Keep practicing and applying these skills in your projects!",
"question": null,
"options": null,
"correct_indices": null,
"correct_text": null,
"hints": null
}
]
}

495
saves/tasouva.json Normal file
View File

@ -0,0 +1,495 @@
{
"name": "Tasouva",
"filename": "saves/tasouva.json",
"progress": {
"{\"Fallacy\":{\"FormalFallacy\":\"SyllogisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "SyllogisticFallacy"
}
},
"questions_answered": 8,
"questions_correct": 3,
"probability_known": 1.0,
"results": [
false,
true,
false,
true,
false,
false,
true,
false
],
"milestones": 3,
"milestones_completed": 1
},
"\"Introduction\"": {
"unit": "Introduction",
"questions_answered": 3,
"questions_correct": 3,
"probability_known": 1.0,
"results": [
true,
true,
true
],
"milestones": 3,
"milestones_completed": 3
},
"{\"Bias\":\"BarnumEffect\"}": {
"unit": {
"Bias": "BarnumEffect"
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"FormalFallacy\":\"PropositionalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "PropositionalFallacy"
}
},
"questions_answered": 8,
"questions_correct": 6,
"probability_known": 1.0,
"results": [
true,
true,
false,
true,
true,
true,
false,
true
],
"milestones": 4,
"milestones_completed": 4
},
"{\"Fallacy\":{\"InformalFallacy\":\"HastyGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "HastyGeneralization"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"QuotingOutOfContext\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "QuotingOutOfContext"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"PostHocErgoPropterHoc\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "PostHocErgoPropterHoc"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Bias\":\"TheHaloEffect\"}": {
"unit": {
"Bias": "TheHaloEffect"
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"SpecialPleading\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SpecialPleading"
}
},
"questions_answered": 2,
"questions_correct": 1,
"probability_known": 0.9234167893961708,
"results": [
true,
false
],
"milestones": 2,
"milestones_completed": 1
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdIgnorantiam\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdIgnorantiam"
}
},
"questions_answered": 2,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
true
],
"milestones": 2,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"BeggingTheQuestion\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "BeggingTheQuestion"
}
},
"questions_answered": 2,
"questions_correct": 1,
"probability_known": 0.9234167893961708,
"results": [
true,
false
],
"milestones": 2,
"milestones_completed": 1
},
"{\"Fallacy\":{\"FormalFallacy\":\"QuantificationalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "QuantificationalFallacy"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
true,
false
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"TexasSharpshooter\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TexasSharpshooter"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"TuQuoque\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TuQuoque"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdHominem\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdHominem"
}
},
"questions_answered": 2,
"questions_correct": 1,
"probability_known": 0.9234167893961708,
"results": [
true,
false
],
"milestones": 2,
"milestones_completed": 1
},
"{\"Fallacy\":{\"InformalFallacy\":\"OverGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "OverGeneralization"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Bias\":\"FundamentalAttributionError\"}": {
"unit": {
"Bias": "FundamentalAttributionError"
},
"questions_answered": 3,
"questions_correct": 1,
"probability_known": 0.7508982035928139,
"results": [
true,
false,
false
],
"milestones": 3,
"milestones_completed": 1
},
"{\"Appraisal\":\"CounterArgument\"}": {
"unit": {
"Appraisal": "CounterArgument"
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
true,
false
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"StrawMan\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "StrawMan"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"InformalFallacy\":\"SlipperySlope\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SlipperySlope"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
false,
true
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Appraisal\":\"ConversionToPropositional\"}": {
"unit": {
"Appraisal": "ConversionToPropositional"
},
"questions_answered": 12,
"questions_correct": 3,
"probability_known": 0.004850875121986713,
"results": [
false,
false,
false,
false,
false,
false,
false,
false,
true,
true,
false,
true
],
"milestones": 4,
"milestones_completed": 3
},
"{\"Bias\":\"InGroupBias\"}": {
"unit": {
"Bias": "InGroupBias"
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
true,
false
],
"milestones": 3,
"milestones_completed": 2
},
"{\"Fallacy\":{\"FormalFallacy\":\"ProbabilisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "ProbabilisticFallacy"
}
},
"questions_answered": 3,
"questions_correct": 2,
"probability_known": 1.0,
"results": [
true,
true,
false
],
"milestones": 3,
"milestones_completed": 3
},
"{\"Bias\":\"ConfirmationBias\"}": {
"unit": {
"Bias": "ConfirmationBias"
},
"questions_answered": 3,
"questions_correct": 1,
"probability_known": 0.7508982035928139,
"results": [
true,
false,
false
],
"milestones": 3,
"milestones_completed": 1
},
"{\"Fallacy\":{\"InformalFallacy\":\"NoTrueScotsman\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "NoTrueScotsman"
}
},
"questions_answered": 2,
"questions_correct": 1,
"probability_known": 0.9234167893961708,
"results": [
true,
false
],
"milestones": 2,
"milestones_completed": 1
},
"{\"Logic\":\"LogicalOperations\"}": {
"unit": {
"Logic": "LogicalOperations"
},
"questions_answered": 8,
"questions_correct": 7,
"probability_known": 1.0,
"results": [
true,
false,
true,
true,
true,
true,
true,
true
],
"milestones": 7,
"milestones_completed": 7
},
"{\"Bias\":\"DunningKrugerEffect\"}": {
"unit": {
"Bias": "DunningKrugerEffect"
},
"questions_answered": 3,
"questions_correct": 1,
"probability_known": 0.7508982035928139,
"results": [
true,
false,
false
],
"milestones": 3,
"milestones_completed": 1
},
"{\"Logic\":\"BooleanAlgebra\"}": {
"unit": {
"Logic": "BooleanAlgebra"
},
"questions_answered": 5,
"questions_correct": 5,
"probability_known": 1.0,
"results": [
true,
true,
true,
true,
true
],
"milestones": 5,
"milestones_completed": 5
},
"{\"Fallacy\":{\"InformalFallacy\":\"Bandwagon\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "Bandwagon"
}
},
"questions_answered": 2,
"questions_correct": 1,
"probability_known": 0.9234167893961708,
"results": [
true,
false
],
"milestones": 2,
"milestones_completed": 1
}
}
}

363
saves/user1.json Normal file
View File

@ -0,0 +1,363 @@
{
"name": "User1",
"filename": "saves/user1.json",
"progress": {
"{\"Fallacy\":{\"FormalFallacy\":\"ProbabilisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "ProbabilisticFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"QuotingOutOfContext\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "QuotingOutOfContext"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"TheHaloEffect\"}": {
"unit": {
"Bias": "TheHaloEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"FundamentalAttributionError\"}": {
"unit": {
"Bias": "FundamentalAttributionError"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"TexasSharpshooter\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TexasSharpshooter"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"DunningKrugerEffect\"}": {
"unit": {
"Bias": "DunningKrugerEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Appraisal\":\"ConversionToPropositional\"}": {
"unit": {
"Appraisal": "ConversionToPropositional"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"\"Introduction\"": {
"unit": "Introduction",
"questions_answered": 3,
"questions_correct": 1,
"probability_known": 0.7508982035928139,
"results": [
true,
false,
false
],
"milestones": 3,
"milestones_completed": 1
},
"{\"Bias\":\"InGroupBias\"}": {
"unit": {
"Bias": "InGroupBias"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"NoTrueScotsman\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "NoTrueScotsman"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Appraisal\":\"CounterArgument\"}": {
"unit": {
"Appraisal": "CounterArgument"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"StrawMan\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "StrawMan"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"SyllogisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "SyllogisticFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"SpecialPleading\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SpecialPleading"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"TuQuoque\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TuQuoque"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"PostHocErgoPropterHoc\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "PostHocErgoPropterHoc"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdIgnorantiam\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdIgnorantiam"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Logic\":\"LogicalOperations\"}": {
"unit": {
"Logic": "LogicalOperations"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"QuantificationalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "QuantificationalFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"HastyGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "HastyGeneralization"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"OverGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "OverGeneralization"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"Bandwagon\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "Bandwagon"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"ConfirmationBias\"}": {
"unit": {
"Bias": "ConfirmationBias"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"PropositionalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "PropositionalFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdHominem\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdHominem"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"BarnumEffect\"}": {
"unit": {
"Bias": "BarnumEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Logic\":\"BooleanAlgebra\"}": {
"unit": {
"Logic": "BooleanAlgebra"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"SlipperySlope\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SlipperySlope"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"BeggingTheQuestion\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "BeggingTheQuestion"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
}
}
}

364
saves/yocirei.json Normal file
View File

@ -0,0 +1,364 @@
{
"name": "Yocirei",
"filename": "saves/yocirei.json",
"progress": {
"{\"Logic\":\"BooleanAlgebra\"}": {
"unit": {
"Logic": "BooleanAlgebra"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"BarnumEffect\"}": {
"unit": {
"Bias": "BarnumEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"PostHocErgoPropterHoc\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "PostHocErgoPropterHoc"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdIgnorantiam\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdIgnorantiam"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"QuotingOutOfContext\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "QuotingOutOfContext"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Logic\":\"LogicalOperations\"}": {
"unit": {
"Logic": "LogicalOperations"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"PropositionalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "PropositionalFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Appraisal\":\"CounterArgument\"}": {
"unit": {
"Appraisal": "CounterArgument"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"DunningKrugerEffect\"}": {
"unit": {
"Bias": "DunningKrugerEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"SlipperySlope\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SlipperySlope"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"NoTrueScotsman\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "NoTrueScotsman"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"ProbabilisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "ProbabilisticFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"Bandwagon\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "Bandwagon"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"TheHaloEffect\"}": {
"unit": {
"Bias": "TheHaloEffect"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"OverGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "OverGeneralization"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Appraisal\":\"ConversionToPropositional\"}": {
"unit": {
"Appraisal": "ConversionToPropositional"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"StrawMan\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "StrawMan"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"BeggingTheQuestion\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "BeggingTheQuestion"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"InGroupBias\"}": {
"unit": {
"Bias": "InGroupBias"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"TexasSharpshooter\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TexasSharpshooter"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"QuantificationalFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "QuantificationalFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"ConfirmationBias\"}": {
"unit": {
"Bias": "ConfirmationBias"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"FormalFallacy\":\"SyllogisticFallacy\"}}": {
"unit": {
"Fallacy": {
"FormalFallacy": "SyllogisticFallacy"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"\"Introduction\"": {
"unit": "Introduction",
"questions_answered": 4,
"questions_correct": 3,
"probability_known": 1.0,
"results": [
true,
true,
false,
true
],
"milestones": 3,
"milestones_completed": 3
},
"{\"Fallacy\":{\"InformalFallacy\":\"AdHominem\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "AdHominem"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"TuQuoque\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "TuQuoque"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"SpecialPleading\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "SpecialPleading"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Bias\":\"FundamentalAttributionError\"}": {
"unit": {
"Bias": "FundamentalAttributionError"
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
},
"{\"Fallacy\":{\"InformalFallacy\":\"HastyGeneralization\"}}": {
"unit": {
"Fallacy": {
"InformalFallacy": "HastyGeneralization"
}
},
"questions_answered": 0,
"questions_correct": 0,
"probability_known": 0.3,
"results": [],
"milestones": 0,
"milestones_completed": 0
}
}
}

3
src/course/mod.rs Normal file
View File

@ -0,0 +1,3 @@
pub(crate) mod units;
pub(crate) mod story;
pub(crate) mod tracker;

218
src/course/story.rs Normal file
View File

@ -0,0 +1,218 @@
use serde::{Deserialize, Serialize};
use crate::{files, learner::profile, printers::{self, clear_console, print_boxed, print_screen}, utilities::questions::{ask_mcq, ask_multi_select, ask_plaintext}, Module};
use super::units;
const REQUIRED_FOR_MASTERY: f64 = 0.1;
/// A complete lesson unit as deserialized from its JSON story file
/// (see the course JSON files in this repo: `unit`, `title`, `screens`).
#[derive(Serialize, Deserialize, Debug)]
pub struct UnitStory {
    // Which module this story belongs to; used as the progress-tracker key.
    unit: Module,
    // Human-readable title shown via `printers::unit_screen`.
    title: String,
    // Screens are played strictly in order by `inner_loop`.
    screens: Vec<Screen>
}
/// Kind of a single screen. `Text` screens only narrate; every other
/// variant is an interactive question and counts as a milestone in
/// `inner_loop`'s milestone tally.
#[derive(Serialize, Deserialize, Debug)]
enum ScreenType {
    Text,
    Mcq,
    Checkboxes,
    FreeResponse,
}
/// One screen of a unit story. For `Text` screens only `text` is set;
/// the question-related fields are `null` in the JSON (hence `Option`).
#[derive(Serialize, Deserialize, Debug)]
pub struct Screen {
    screen_type: ScreenType,
    // Narration shown for Text screens (empty string on question screens).
    text: String,
    // Prompt for Mcq / Checkboxes / FreeResponse screens.
    question: Option<String>,
    // Answer choices for Mcq / Checkboxes screens.
    options: Option<Vec<String>>,
    // Indices into `options` that are correct (Mcq expects one, Checkboxes any number).
    correct_indices: Option<Vec<usize>>,
    // Accepted answers for FreeResponse screens (matched after normalization).
    correct_text: Option<Vec<String>>,
    // Hints revealed one per failed attempt; answer is shown once exhausted.
    hints: Option<Vec<String>>,
}
/// Top-level session loop: repeatedly picks the next unmastered unit,
/// runs it via `inner_loop`, then asks the learner whether to continue
/// or quit. Progress is saved on every exit path and between units.
pub fn outer_loop(learner: &mut profile::Learner) {
    // Menu options; named once so the comparison below can't drift out of
    // sync with the displayed text.
    const KEEP_WORKING: &str = "Keep working (and save the galaxy)";
    const LEAVE: &str = "Leave and save (risking all life in the galaxy)";
    loop {
        let next_unit = match select_next_unit(learner) {
            Some(unit) => unit,
            None => {
                // Everything is mastered: congratulate, save, and exit.
                print_boxed("Congratulations! You have completed all the modules. You are now a master of the galaxy! I have nothing new to teach you. Well done.", printers::StatementType::CorrectFeedback);
                println!("Saving your progress...");
                files::save_to_file(&learner.filename, &learner)
                    .expect("Failed to save profile file");
                println!("Progress saved. Goodbye!");
                return;
            }
        };
        inner_loop(next_unit.clone(), learner);
        // Report whether the unit is now fully mastered (all milestones done).
        if learner.progress.get(&next_unit).map_or(true, |p| p.get_probability() >= 1.0) {
            print_boxed(&format!("You have completed {:?}", next_unit), printers::StatementType::GeneralFeedback);
        } else {
            print_boxed(&format!("I think we still need to practice {:?}. Let's keep practicing before we move on.", next_unit), printers::StatementType::GeneralFeedback);
        }
        let answer = ask_mcq("Do you want to keep going or leave and save?", &[
            KEEP_WORKING,
            LEAVE,
        ]).unwrap();
        if answer == KEEP_WORKING {
            // Plain literal: the original wrapped this in a no-arg format!().
            print_boxed("Great! Let's carry on.", printers::StatementType::GeneralFeedback);
            println!("Saving your progress...");
            files::save_to_file(&learner.filename, &learner)
                .expect("Failed to save profile file");
            println!("Progress saved. Let's keep going!");
            learner.display_progress();
        } else {
            println!("Bummer. We were really counting on you, but I guess you probably have human needs to attend to.");
            println!("Saving your progress...");
            files::save_to_file(&learner.filename, &learner)
                .expect("Failed to save profile file");
            println!("Progress saved. Goodbye!");
            return;
        }
    }
}
/// Plays one unit's story: shows each screen in order, quizzes the learner
/// on interactive screens, and records every attempt on the learner's
/// progress tracker for `unit`.
///
/// Bug fixes versus the previous version:
/// - hint exhaustion compared `attempts` against `screen.hints.iter().len()`,
///   which iterates the `Option` itself and is always 1 when `Some` — so only
///   the first hint was ever shown. It now compares against the actual number
///   of hints.
/// - removed dead `options` bindings that were immediately shadowed.
/// - each hint was printed twice (boxed, then again via `println!`).
pub fn inner_loop(unit: Module, learner: &mut profile::Learner) {
    print_boxed(&format!("The unit is {}", unit), printers::StatementType::GeneralFeedback);
    let filename = unit.get_filename();
    let unit_story = files::parse_unit(&filename).unwrap();
    printers::unit_screen(&unit_story.title);

    // Every non-Text screen is a milestone (a question that can be passed).
    let milestones = unit_story
        .screens
        .iter()
        .filter(|s| !matches!(s.screen_type, ScreenType::Text))
        .count();
    if let Some(progress) = learner.progress.get_mut(&unit) {
        progress.set_milestones(milestones);
        progress.set_completed_milestones(0);
    }

    for screen in unit_story.screens {
        let mut attempts = 0;
        match screen.screen_type {
            ScreenType::Text => {
                clear_console();
                print_screen(&screen.text);
            }
            ScreenType::Mcq => {
                let mut options = screen.options.clone().unwrap();
                options.push("Get hint".to_string());
                let options: Vec<&str> = options.iter().map(|s| s.as_str()).collect();
                loop {
                    let answer = ask_mcq(&screen.question.clone().unwrap(), &options).unwrap();
                    let chosen = options.iter().position(|x| *x == answer).unwrap_or(usize::MAX);
                    let correct = if screen.correct_indices.as_ref().unwrap().contains(&chosen) {
                        print_boxed("Correct!", printers::StatementType::CorrectFeedback);
                        true
                    } else {
                        if let Some(hints) = screen.hints.as_ref() {
                            if attempts == hints.len() {
                                // Hints exhausted: reveal the answer and move on
                                // without recording another attempt.
                                let correct_indices_text: Vec<_> = screen
                                    .correct_indices
                                    .as_ref()
                                    .unwrap()
                                    .iter()
                                    .map(|&i| options[i])
                                    .collect();
                                print_boxed(&format!("It seems like these hints aren't working. The answer is: {:?}", correct_indices_text), printers::StatementType::IncorrectFeedback);
                                break;
                            } else {
                                let hint = &hints[attempts];
                                print_boxed(&format!("Please try the question again. Here's a hint: {}", hint), printers::StatementType::IncorrectFeedback);
                                attempts += 1;
                            }
                        }
                        false
                    };
                    learner.update_progress(unit.clone(), correct);
                    learner.progress.get_mut(&unit).unwrap().print_progress();
                    if correct {
                        break;
                    }
                }
            }
            ScreenType::Checkboxes => {
                let mut options = screen.options.clone().unwrap();
                options.push("Get hint".to_string());
                let options: Vec<&str> = options.iter().map(|s| s.as_str()).collect();
                loop {
                    let answers = ask_multi_select(&screen.question.clone().unwrap(), &options).unwrap();
                    let correct_indices: Vec<usize> = screen.correct_indices.as_ref().unwrap().to_vec();
                    let selected_indices: Vec<usize> = answers
                        .iter()
                        .filter_map(|answer| options.iter().position(|x| *x == *answer))
                        .collect();
                    // Correct iff the selected set equals the correct set.
                    let correct = selected_indices.iter().all(|&index| correct_indices.contains(&index))
                        && selected_indices.len() == correct_indices.len();
                    if correct {
                        print_boxed("Correct!", printers::StatementType::CorrectFeedback);
                        learner.update_progress(unit.clone(), true);
                        learner.progress.get_mut(&unit).unwrap().print_progress();
                        break;
                    } else {
                        if let Some(hints) = screen.hints.as_ref() {
                            if attempts == hints.len() {
                                // Hints exhausted: reveal the answers; no attempt recorded.
                                print_boxed(&format!("It seems like these hints aren't working. The correct answers are: {:?}", correct_indices.iter().map(|&i| options[i]).collect::<Vec<_>>()), printers::StatementType::IncorrectFeedback);
                                break;
                            } else {
                                let hint = format!("{}\nThere are {} correct answers.", hints[attempts], correct_indices.len());
                                print_boxed(&format!("Please try the question again. Here's a hint: {}", hint), printers::StatementType::IncorrectFeedback);
                                attempts += 1;
                            }
                        }
                        learner.update_progress(unit.clone(), false);
                        learner.progress.get_mut(&unit).unwrap().print_progress();
                    }
                }
            }
            ScreenType::FreeResponse => {
                loop {
                    let answer = ask_plaintext(&screen.question.clone().unwrap()).unwrap();
                    let correct_text: Vec<String> = screen.correct_text.as_ref().unwrap().to_vec();
                    // Compare case-insensitively, ignoring whitespace and punctuation.
                    let normalized_answer = answer.trim().to_lowercase().replace(|c: char| !c.is_alphanumeric(), "");
                    let correct = correct_text.iter().any(|i| {
                        let normalized_correct_answer = i.trim().to_lowercase().replace(|c: char| !c.is_alphanumeric(), "");
                        normalized_answer == normalized_correct_answer
                    });
                    if correct {
                        print_boxed("Correct!", printers::StatementType::CorrectFeedback);
                        learner.update_progress(unit.clone(), true);
                        learner.progress.get_mut(&unit).unwrap().print_progress();
                        break;
                    } else {
                        if let Some(hints) = screen.hints.as_ref() {
                            if attempts == hints.len() {
                                // Hints exhausted: reveal the answers; no attempt recorded.
                                print_boxed(&format!("It seems like these hints aren't working. The correct answers are: {:?}", correct_text), printers::StatementType::IncorrectFeedback);
                                break;
                            } else {
                                let hint = &hints[attempts];
                                print_boxed(&format!("Please try the question again. Here's a hint: {}", hint), printers::StatementType::IncorrectFeedback);
                                attempts += 1;
                            }
                        }
                        learner.update_progress(unit.clone(), false);
                        learner.progress.get_mut(&unit).unwrap().print_progress();
                    }
                }
            }
        }
    }
}
/// Chooses the next module the learner should work on, or `None` when
/// every tracked module has reached `REQUIRED_FOR_MASTERY`.
pub fn select_next_unit(learner: &profile::Learner) -> Option<units::Module> {
    // A learner who has not started (or never passed a milestone of) the
    // Introduction always begins there.
    if learner
        .progress
        .get(&Module::Introduction)
        .map_or(true, |p| p.get_probability() == 0.0)
    {
        return Some(Module::Introduction);
    }
    // Otherwise take the first module below the mastery threshold, in
    // `Module::iter()` order.
    for module in units::Module::iter() {
        if let Some(progress) = learner.progress.get(&module) {
            if progress.get_probability() < REQUIRED_FOR_MASTERY {
                if progress.get_probability() != 0.0 {
                    // Only nag about modules the learner has actually started.
                    print_boxed(&format!("It looks like you haven't mastered {} yet. Let's work on that.", module), printers::StatementType::GeneralFeedback);
                }
                return Some(module);
            }
        }
    }
    None
}

141
src/course/tracker.rs Normal file
View File

@ -0,0 +1,141 @@
use crossterm::{style::Stylize, terminal::size};
use serde::{Deserialize, Serialize};
use super::units::Module;
/// Per-unit learning progress, persisted inside the learner's save file.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub(crate) struct Tracker {
    pub(crate) unit: Module,
    // Total questions attempted for this unit.
    questions_answered: u64,
    // How many of those attempts were correct.
    questions_correct: u64,
    // Bayesian estimate that the learner knows the unit (see `quiz_result`).
    probability_known: f64,
    // Per-attempt correct/incorrect history, in order.
    results: Vec<bool>,
    // Number of question screens in the unit (set by the story runner).
    milestones: usize,
    // Question screens answered correctly so far, capped at `milestones`.
    milestones_completed: usize,
}
impl Tracker {
pub(crate) fn new(unit: Module) -> Self {
Self {
unit,
questions_answered: 0,
questions_correct: 0,
probability_known: 0.3, // Start with a 30% chance of knowing the answer
results: Vec::new(),
milestones: 0,
milestones_completed: 0,
}
}
pub(crate) fn quiz_result(&mut self, correct: bool) -> f64 {
self.questions_answered += 1;
self.results.push(correct);
if correct {
self.milestones_completed = (self.milestones_completed + 1).min(self.milestones);
self.questions_correct += 1;
}
// Calculate streak of correct answers
let mut streak = 0;
for &result in self.results.iter().rev() {
if result {
streak += 1;
} else {
break;
}
}
let prior = self.probability_known;
let slip = if self.questions_answered < 10 {
0.05 // Lower slip rate for beginners
} else if self.probability_known > 0.8 {
0.15 // Higher slip rate for advanced students
} else {
0.1 // Default slip rate
};
let guess = if self.questions_answered < 10 {
0.2 // Lower guess rate for beginners
} else if self.probability_known > 0.8 {
0.3 // Higher guess rate for advanced students
} else {
0.25 // Default guess rate (average of 4 options)
};
let likelihood = if correct {
1.0 - slip
} else {
guess
};
// Adjust posterior based on streak
let streak_multiplier = 1.0 + (streak as f64 * 0.1); // Increase probability for streaks
let posterior = if prior == 0.0 {
likelihood * streak_multiplier // Start with likelihood if prior is zero
} else {
((likelihood * prior) / ((likelihood * prior) + ((1.0 - likelihood) * (1.0 - prior)))) * streak_multiplier
};
self.probability_known = posterior.min(1.0).max(0.0);
self.probability_known
}
pub(crate) fn get_probability(&self) -> f64 {
// self.probability_known
if self.milestones == 0 {
return 0.0;
}
if self.milestones_completed == 0 {
return 0.0;
}
self.milestones_completed as f64 / self.milestones as f64
}
pub fn get_milestones(&self) -> usize {
self.milestones
}
pub fn get_milestones_completed(&self) -> usize {
self.milestones_completed
}
pub(crate) fn print_progress(&self) {
let (cols, _) = size().unwrap();
// let percent = self.probability_known * 100.0; // Assuming `probability_known` is a method in `LessonTracker`
let percent = if self.milestones_completed > 0 {
(self.milestones_completed as f64) / (self.milestones as f64) * 100.0 // Assuming `probability_known` is a method in `LessonTracker`
} else {0.0};
let percent_name = format!("{:>4.0}%", percent).dark_blue();
let unit_name = format!("{} {} ", percent_name, self.unit);
let dots = ".".repeat((cols as usize).saturating_sub(unit_name.len() + 11));
match percent {
percent if percent > 0.0 => {
let progress = percent as usize;
let filled_percent = progress / 5; // Each '=' represents 5%
let bars = format!(
"|{}{}|",
"".repeat(filled_percent),
" ".repeat(20_usize.saturating_sub(filled_percent)),
);
println!(
"{} {} {}",
unit_name,
dots,
bars,
);
}
_ => {
println!(
"{} {} |{}|",
unit_name,
dots,
" ".repeat(20),
);
}
}
}
pub fn set_milestones(&mut self, milestones: usize) {
self.milestones = milestones;
}
pub fn set_completed_milestones(&mut self, completed_milestones: usize) {
self.milestones_completed = completed_milestones;
}
}

382
src/course/units.rs Normal file
View File

@ -0,0 +1,382 @@
use serde::{Deserialize, Serialize};
use std::fmt;
use rand::{rng, seq::IndexedRandom};
use std::fs;
use std::path::Path;
// use strum_macros::EnumIter;
/// Module defines the structure and data for the course units, including lessons and modules.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone)]
pub(crate) enum Module {
    /// The onboarding unit every new learner sees first.
    Introduction,
    /// Formal logic lessons (logical operations, boolean algebra).
    Logic(Logic),
    /// Formal and informal fallacy lessons.
    Fallacy(Fallacy),
    /// Cognitive bias lessons.
    Bias(Bias),
    /// Argument-appraisal lessons.
    Appraisal(Appraisal),
}
/// The curriculum starts at the introduction by default.
impl Default for Module {
    fn default() -> Self {
        Module::Introduction
    }
}
impl fmt::Display for Module {
    /// Render the unit as a human-readable label, prefixing nested
    /// lessons with their category name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Module::Introduction => String::from("Introduction"),
            Module::Logic(logic) => format!("Logic: {}", logic),
            Module::Fallacy(fallacy) => format!("Fallacy: {}", fallacy),
            Module::Bias(bias) => format!("Bias: {}", bias),
            Module::Appraisal(appraisal) => format!("Appraisal: {}", appraisal),
        };
        write!(f, "{}", label)
    }
}
impl Module {
pub(crate) fn get_filename(&self) -> String {
match self {
Module::Introduction => format!("data/introduction.json"),
Module::Logic(logic) => format!("data/logic/{}.json", logic.get_filename()),
Module::Fallacy(fallacy) => format!("data/fallacy/{}.json", fallacy.get_filename()),
Module::Bias(bias) => format!("data/bias/{}.json", bias.get_filename()),
Module::Appraisal(appraisal) => format!("data/appraisal/{}.json", appraisal.get_filename()),
}
}
pub(crate) fn random() -> Self {
let mut rng = rng();
let variants = [
Module::Logic(Logic::random()),
Module::Fallacy(Fallacy::random()),
Module::Bias(Bias::random()),
Module::Appraisal(Appraisal::random()),
];
variants.choose(&mut rng).unwrap().clone()
}
pub fn iter() -> impl Iterator<Item = Self> {
vec![
Module::Introduction,
Module::Logic(Logic::LogicalOperations),
Module::Logic(Logic::BooleanAlgebra),
Module::Fallacy(Fallacy::FormalFallacy(FormalFallacy::PropositionalFallacy)),
Module::Fallacy(Fallacy::FormalFallacy(FormalFallacy::ProbabilisticFallacy)),
Module::Fallacy(Fallacy::FormalFallacy(FormalFallacy::SyllogisticFallacy)),
Module::Fallacy(Fallacy::FormalFallacy(FormalFallacy::QuantificationalFallacy)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::PostHocErgoPropterHoc)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::SlipperySlope)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::TexasSharpshooter)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::HastyGeneralization)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::OverGeneralization)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::NoTrueScotsman)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::QuotingOutOfContext)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::AdHominem)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::TuQuoque)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::Bandwagon)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::StrawMan)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::AdIgnorantiam)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::SpecialPleading)),
Module::Fallacy(Fallacy::InformalFallacy(InformalFallacy::BeggingTheQuestion)),
Module::Bias(Bias::ConfirmationBias),
Module::Bias(Bias::TheHaloEffect),
Module::Bias(Bias::FundamentalAttributionError),
Module::Bias(Bias::InGroupBias),
Module::Bias(Bias::DunningKrugerEffect),
Module::Bias(Bias::BarnumEffect),
Module::Appraisal(Appraisal::ConversionToPropositional),
Module::Appraisal(Appraisal::CounterArgument),
]
.into_iter()
}
// pub(crate) fn create_empty_files() -> std::io::Result<()> {
// for module in Module::iter() {
// let filename = module.get_filename();
// let path = Path::new(&filename);
// if let Some(parent) = path.parent() {
// fs::create_dir_all(parent)?;
// }
// fs::File::create(path)?;
// }
// Ok(())
// }
}
/// Logic module, which includes various lessons related to logical reasoning and operations.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum Logic {
    LogicalOperations,
    BooleanAlgebra,
}
impl Default for Logic {
    fn default() -> Self {
        Self::LogicalOperations
    }
}
impl std::fmt::Display for Logic {
    /// Human-readable lesson title.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::LogicalOperations => "Logical Operations",
            Self::BooleanAlgebra => "Boolean Algebra",
        };
        write!(f, "{}", name)
    }
}
impl Logic {
    /// File stem of the JSON lesson file under `data/logic/`.
    pub(crate) fn get_filename(&self) -> String {
        match self {
            Self::LogicalOperations => "logical_operations",
            Self::BooleanAlgebra => "boolean_algebra",
        }
        .to_string()
    }
    /// Uniformly pick one logic lesson.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        [Self::LogicalOperations, Self::BooleanAlgebra]
            .choose(&mut generator)
            .unwrap()
            .clone()
    }
}
/// Fallacy module, which includes various lessons related to logical fallacies and errors in reasoning.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum Fallacy {
    FormalFallacy(FormalFallacy),
    InformalFallacy(InformalFallacy),
}
impl Default for Fallacy {
    fn default() -> Self {
        Self::FormalFallacy(FormalFallacy::PropositionalFallacy)
    }
}
impl std::fmt::Display for Fallacy {
    /// Prefix the inner lesson title with its fallacy family.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::FormalFallacy(formal) => write!(f, "Formal: {}", formal),
            Self::InformalFallacy(informal) => write!(f, "Informal: {}", informal),
        }
    }
}
impl Fallacy {
    /// Relative file stem: `formal/...` or `informal/...`.
    pub(crate) fn get_filename(&self) -> String {
        let (family, inner) = match self {
            Self::FormalFallacy(formal) => ("formal", formal.get_filename()),
            Self::InformalFallacy(informal) => ("informal", informal.get_filename()),
        };
        format!("{}/{}", family, inner)
    }
    /// Pick a random fallacy lesson, choosing the family uniformly first.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        let options = [
            Self::FormalFallacy(FormalFallacy::random()),
            Self::InformalFallacy(InformalFallacy::random()),
        ];
        options.choose(&mut generator).unwrap().clone()
    }
}
/// Formal fallacies, which are errors in the structure of an argument.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum FormalFallacy {
    PropositionalFallacy,
    ProbabilisticFallacy,
    SyllogisticFallacy,
    QuantificationalFallacy
}
impl Default for FormalFallacy {
    fn default() -> Self {
        Self::PropositionalFallacy
    }
}
impl std::fmt::Display for FormalFallacy {
    /// Human-readable lesson title.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::PropositionalFallacy => "Propositional Fallacy",
            Self::ProbabilisticFallacy => "Probabilistic Fallacy",
            Self::SyllogisticFallacy => "Syllogistic Fallacy",
            Self::QuantificationalFallacy => "Quantificational Fallacy",
        };
        write!(f, "{}", label)
    }
}
impl FormalFallacy {
    /// File stem of the JSON lesson file for this fallacy.
    pub(crate) fn get_filename(&self) -> String {
        match self {
            Self::PropositionalFallacy => "propositional",
            Self::ProbabilisticFallacy => "probabilistic",
            Self::SyllogisticFallacy => "syllogistic",
            Self::QuantificationalFallacy => "quantificational",
        }
        .to_string()
    }
    /// Uniformly pick one formal-fallacy lesson.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        [
            Self::PropositionalFallacy,
            Self::ProbabilisticFallacy,
            Self::SyllogisticFallacy,
            Self::QuantificationalFallacy,
        ]
        .choose(&mut generator)
        .unwrap()
        .clone()
    }
}
/// Informal fallacies, which are errors in reasoning that do not involve the structure of the argument.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum InformalFallacy {
    PostHocErgoPropterHoc,
    SlipperySlope,
    TexasSharpshooter,
    HastyGeneralization,
    OverGeneralization,
    NoTrueScotsman,
    QuotingOutOfContext,
    AdHominem,
    TuQuoque,
    Bandwagon,
    StrawMan,
    AdIgnorantiam,
    SpecialPleading,
    BeggingTheQuestion
}
impl Default for InformalFallacy {
    fn default() -> Self {
        Self::PostHocErgoPropterHoc
    }
}
impl std::fmt::Display for InformalFallacy {
    /// Human-readable lesson title.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::PostHocErgoPropterHoc => "Post Hoc Ergo Propter Hoc",
            Self::SlipperySlope => "Slippery Slope",
            Self::TexasSharpshooter => "Texas Sharpshooter",
            Self::HastyGeneralization => "Hasty Generalization",
            Self::OverGeneralization => "Over Generalization",
            Self::NoTrueScotsman => "No True Scotsman",
            Self::QuotingOutOfContext => "Quoting Out Of Context",
            Self::AdHominem => "Ad Hominem",
            Self::TuQuoque => "Tu Quoque",
            Self::Bandwagon => "Bandwagon",
            Self::StrawMan => "Straw Man",
            Self::AdIgnorantiam => "Ad Ignorantiam",
            Self::SpecialPleading => "Special Pleading",
            Self::BeggingTheQuestion => "Begging The Question",
        };
        write!(f, "{}", label)
    }
}
impl InformalFallacy {
    /// File stem of the JSON lesson file for this fallacy.
    pub(crate) fn get_filename(&self) -> String {
        match self {
            Self::PostHocErgoPropterHoc => "post_hoc_ergo_propter_hoc",
            Self::SlipperySlope => "slippery_slope",
            Self::TexasSharpshooter => "texas_sharpshooter",
            Self::HastyGeneralization => "hasty_generalization",
            Self::OverGeneralization => "over_generalization",
            Self::NoTrueScotsman => "no_true_scotsman",
            Self::QuotingOutOfContext => "quoting_out_of_context",
            Self::AdHominem => "ad_hominem",
            Self::TuQuoque => "tu_quoque",
            Self::Bandwagon => "bandwagon",
            Self::StrawMan => "straw_man",
            Self::AdIgnorantiam => "ad_ignorantiam",
            Self::SpecialPleading => "special_pleading",
            Self::BeggingTheQuestion => "begging_the_question",
        }
        .to_string()
    }
    /// Uniformly pick one informal-fallacy lesson.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        [
            Self::PostHocErgoPropterHoc,
            Self::SlipperySlope,
            Self::TexasSharpshooter,
            Self::HastyGeneralization,
            Self::OverGeneralization,
            Self::NoTrueScotsman,
            Self::QuotingOutOfContext,
            Self::AdHominem,
            Self::TuQuoque,
            Self::Bandwagon,
            Self::StrawMan,
            Self::AdIgnorantiam,
            Self::SpecialPleading,
            Self::BeggingTheQuestion,
        ]
        .choose(&mut generator)
        .unwrap()
        .clone()
    }
}
/// Bias module, which includes various lessons related to cognitive biases and their impact on reasoning.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum Bias {
    ConfirmationBias,
    TheHaloEffect,
    FundamentalAttributionError,
    InGroupBias,
    DunningKrugerEffect,
    BarnumEffect
}
impl Default for Bias {
    fn default() -> Self {
        Self::ConfirmationBias
    }
}
impl std::fmt::Display for Bias {
    /// Human-readable lesson title.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::ConfirmationBias => "Confirmation Bias",
            Self::TheHaloEffect => "The Halo Effect",
            Self::FundamentalAttributionError => "Fundamental Attribution Error",
            Self::InGroupBias => "In Group Bias",
            Self::DunningKrugerEffect => "Dunning Kruger Effect",
            Self::BarnumEffect => "Barnum Effect",
        };
        write!(f, "{}", label)
    }
}
impl Bias {
    /// File stem of the JSON lesson file under `data/bias/`.
    pub(crate) fn get_filename(&self) -> String {
        match self {
            Self::ConfirmationBias => "confirmation_bias",
            Self::TheHaloEffect => "the_halo_effect",
            Self::FundamentalAttributionError => "fundamental_attribution_error",
            Self::InGroupBias => "in_group_bias",
            Self::DunningKrugerEffect => "dunning_kruger_effect",
            Self::BarnumEffect => "barnum_effect",
        }
        .to_string()
    }
    /// Uniformly pick one bias lesson.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        [
            Self::ConfirmationBias,
            Self::TheHaloEffect,
            Self::FundamentalAttributionError,
            Self::InGroupBias,
            Self::DunningKrugerEffect,
            Self::BarnumEffect,
        ]
        .choose(&mut generator)
        .unwrap()
        .clone()
    }
}
/// Appraisal module, which includes various lessons related to the evaluation and appraisal of arguments.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub(crate) enum Appraisal {
    ConversionToPropositional,
    CounterArgument,
}
impl Default for Appraisal {
    fn default() -> Self {
        Self::ConversionToPropositional
    }
}
impl std::fmt::Display for Appraisal {
    /// Human-readable lesson title.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::ConversionToPropositional => "Conversion to Propositional",
            Self::CounterArgument => "Counter Argument",
        };
        write!(f, "{}", label)
    }
}
impl Appraisal {
    /// File stem of the JSON lesson file under `data/appraisal/`.
    pub(crate) fn get_filename(&self) -> String {
        match self {
            Self::ConversionToPropositional => "conversion_to_propositional",
            Self::CounterArgument => "counter_argument",
        }
        .to_string()
    }
    /// Uniformly pick one appraisal lesson.
    pub(crate) fn random() -> Self {
        let mut generator = rng();
        [Self::ConversionToPropositional, Self::CounterArgument]
            .choose(&mut generator)
            .unwrap()
            .clone()
    }
}

1
src/learner/mod.rs Normal file
View File

@ -0,0 +1 @@
pub(crate) mod profile;

94
src/learner/profile.rs Normal file
View File

@ -0,0 +1,94 @@
use std::collections::HashMap;
use crossterm::style::Stylize;
use crossterm::terminal::size;
use serde::{Deserialize, Serialize};
use crate::course::{units, tracker};
use crate::{files, printers};
use crate::printers::{clear_console, wait_for_input};
use crate::utilities::questions;
/// A learner's saved profile: display name, on-disk save path, and a
/// per-unit progress tracker, serialized to JSON in `saves/`.
#[serde_with::serde_as]
#[derive(Serialize, Deserialize, Debug)]
pub struct Learner {
    pub name: String,
    /// Path of the JSON save file this profile round-trips through.
    pub filename: String,
    // Module keys are serialized as JSON strings so they can be used as
    // object keys in the save file.
    #[serde_as(as = "HashMap<serde_with::json::JsonString, _>")]
    pub progress: HashMap<units::Module, tracker::Tracker>,
}
impl Learner {
pub fn new() -> Self {
let previous_saves = files::get_previous_saves();
let user = questions::ask_mcq(
"Please select or create a user profile.",
&previous_saves.iter().map(String::as_str).collect::<Vec<&str>>(),
).unwrap();
let learner;
if user == "Create New" {
let name = questions::ask_name();
let sanitized_name = files::sanitize_name(name.as_str());
let filename = format!("saves/{}.json", sanitized_name);
let modules = units::Module::iter();
let mut progress = HashMap::new();
for module in modules {
progress.insert(module.clone(), tracker::Tracker::new(module));
}
learner = Self {
name: name.clone(),
filename: filename.clone(),
progress: progress.clone(),
};
files::save_to_file(&filename, &learner).expect("Failed to create new profile file");
printers::print_boxed(&format!("Profile created for {}. Your progress will be saved in {}.", name, filename), printers::StatementType::Default);
printers::print_boxed("You won't have any progress yet, but the next screen shows what your progress report will look like. Fill all the progress bars to win.", printers::StatementType::Default);
} else {
let sanitized_name = files::sanitize_name(&user);
let filename = format!("saves/{}.json", sanitized_name);
learner = files::load_from_file(&filename).expect("Failed to load profile file");
printers::print_boxed(&format!("Profile loaded for {}.", user), printers::StatementType::Default);
}
learner.display_progress();
return learner
}
pub fn display_progress(&self) {
clear_console();
let (cols, _) = size().unwrap();
let dashes = "-".repeat((cols as usize));
let dots = " ".repeat((cols as usize).saturating_sub(42));
println!("Progress Report for {}:", self.name);
println!("{}", dashes);
println!("{}", format!(" Unit {} | Mastery |", dots).bold());
let mut modules = units::Module::iter();
for module in modules {
if let Some(progress) = self.progress.get(&module) {
self.progress.get(&module).unwrap().print_progress();
}
}
// let mut sorted_progress: Vec<_> = self.progress.order();
// // sorted_progress.sort_by_key(|(module, _)| module.to_string());
// for (_, tracker) in sorted_progress {
// tracker.print_progress();
// }
println!("{}", dashes);
wait_for_input();
}
pub fn update_progress(&mut self, unit: units::Module, question_correct: bool) {
if let Some(tracker) = self.progress.get_mut(&unit) {
tracker.quiz_result(question_correct);
}
}
}

19
src/main.rs Normal file
View File

@ -0,0 +1,19 @@
mod learner;
mod utilities;
mod course;
use course::{story, units::Module};
use learner::profile::Learner;
use utilities::{files, printers, printers::StatementType};
/// Entry point: load or create the learner profile, greet them, then
/// hand control to the story loop.
fn main() {
    let mut learner = Learner::new();
    let greeting = format!("Welcome, {}! Thanks for joining me.", learner.name);
    printers::print_boxed(&greeting, StatementType::Default);
    story::outer_loop(&mut learner);
}

122
src/utilities/files.rs Normal file
View File

@ -0,0 +1,122 @@
use std::{fs::File, io::Read};
use serde::de::DeserializeOwned;
use crate::{course::story, learner::profile::{self, Learner}};
// pub fn save_learner_progress(learner: &Learner, file: &str) -> Result<(), std::io::Error> {
// learner.clone().save_to_file(file)?;
// Ok(())
// }
/// List the names of every loadable profile in `saves/` (creating the
/// directory if missing), followed by the "Create New" sentinel option.
pub fn get_previous_saves() -> Vec<String> {
    // Make sure the saves/ directory exists before listing it.
    let entries = match std::fs::read_dir("saves/") {
        Ok(entries) => entries,
        Err(_) => {
            std::fs::create_dir_all("saves/").expect("Failed to create the saves/ directory");
            std::fs::read_dir("saves/").expect("Failed to read the saves/ directory")
        }
    };
    let mut previous_saves: Vec<String> = Vec::new();
    for entry in entries.flatten() {
        let path = entry.path();
        // Only .json files that actually deserialize as a Learner count.
        if path.extension().and_then(|ext| ext.to_str()) != Some("json") {
            continue;
        }
        if let Ok(file_content) = std::fs::read_to_string(&path) {
            if let Ok(learner) = serde_json::from_str::<Learner>(&file_content) {
                previous_saves.push(learner.name);
            }
        }
    }
    previous_saves.push("Create New".to_string());
    previous_saves
}
/// Serialize `learner` as pretty JSON and write it to `file`.
///
/// # Errors
/// Returns an I/O error if serialization or the write fails; the error
/// is also logged to stderr.
pub fn save_to_file(file: &str, learner: &Learner) -> Result<String, std::io::Error> {
    match serde_json::to_string_pretty(learner) {
        Ok(json_data) => match std::fs::write(file, json_data) {
            // `file` is already a &str; no intermediate String needed.
            Ok(_) => Ok(format!("File saved to {}", file)),
            Err(e) => {
                eprintln!("Error writing to file: {}", e);
                Err(e)
            }
        },
        Err(e) => {
            eprintln!("Error serializing learner: {}", e);
            Err(e.into())
        }
    }
}
/// Read `filename` and deserialize its JSON contents into `T`.
///
/// # Errors
/// Returns an error if the file cannot be read or is not valid JSON
/// for `T`.
pub fn load_from_file<T: DeserializeOwned>(filename: &str) -> Result<T, Box<dyn std::error::Error>> {
    // read_to_string sizes the buffer from file metadata — simpler and
    // faster than the manual open/read dance.
    let contents = std::fs::read_to_string(filename)?;
    Ok(serde_json::from_str(&contents)?)
}
// pub fn load_learner_progress() -> Result<Learner, std::io::Error> {
// let mut new_session: bool = false;
// let mut learner = {
// // Initialize the learner's progress
// // Check if a save file exists to load previous progress
// if std::path::Path::new(SAVE_FILE).exists() {
// match Learner::load_from_file(SAVE_FILE) {
// Ok(progress) => {
// print_boxed(&format!("Welcome back, {}! Let's continue your journey in FABLE.", progress.name), StatementType::Default );
// progress
// }
// Err(e) => {
// new_session = true;
// print_boxed(&format!("Couldn't load your save file due to error: {}.\nStarting a new learning experience.", e), StatementType::IncorrectFeedback );
// Learner::initialize_learner()
// }
// }
// } else {
// new_session = true;
// // Start a new session
// print_boxed("
// Welcome to FABLE, an interactive tutor for formal reasoning.
// Fallacy ~~~~~~~~~~~~
// Awareness and ~~~~~~
// Bias ~~~~~~~~~~~~~~~
// Learning ~~~~~~~~~~~
// Environment ~~~~~~~~", StatementType::Default);
// print_boxed("
// My name is Francis, and I am here to help you learn about formal reasoning.\n
// You see, many years ago, my people were nearly wiped out because of a lack of critical thinking.
// I have been programmed to help you avoid the same fate.\n
// In the next screen, you can select what you want to study.
// Choose wisely. The fate of your people depends on it.", StatementType::Default);
// Learner::initialize_learner()
// }
// };
// if !new_session {
// learner.display_progress();
// learner.confirm_course_selection(); // Confirm the course selection after reading the progress
// }
// let json_data = std::fs::read_to_string(SAVE_FILE)?;
// let progress: Learner = serde_json::from_str(&json_data)?;
// Ok(progress)
// }
// pub fn load_learner_progress() -> Result<Learner, std::io::Error> {
// let json_data = fs::read_to_string(SAVE_FILE)?;
// let progress: Learner = serde_json::from_str(&json_data)?;
// Ok(progress)
// }
/// Normalize a display name into a safe file-name stem: every
/// non-alphanumeric character becomes `_` and the result is lowercased.
pub fn sanitize_name(name: &str) -> String {
    name.chars()
        .map(|c| if c.is_alphanumeric() { c } else { '_' })
        .collect::<String>()
        .to_lowercase()
}
/// Load and deserialize a unit's story JSON file.
///
/// # Errors
/// Returns an error if the file cannot be read or does not match the
/// `UnitStory` schema.
pub fn parse_unit(filename: &str) -> Result<story::UnitStory, Box<dyn std::error::Error>> {
    // Same idiom as load_from_file: one read, then deserialize.
    let contents = std::fs::read_to_string(filename)?;
    Ok(serde_json::from_str(&contents)?)
}

123
src/utilities/generators.rs Normal file
View File

@ -0,0 +1,123 @@
use rand::Rng;
/// Syllables that can make up a name.
/// WARNING: Later in this file, there is a list of offensive terms that should not be included in the name.
/// If you add more syllables, make sure they do not form any offensive terms.
const SYLLABLES: [&str; 295] = [
    "ba", "be", "bi", "bo", "bu",
    "ca", "ce", "ci", "co", "cu",
    "da", "de", "di", "do", "du",
    "fa", "fe", "fi", "fo", "fu",
    "ga", "ge", "gi", "go", "gu",
    "ha", "he", "hi", "ho", "hu",
    "ja", "je", "ji", "jo", "ju",
    "ka", "ke", "ki", "ko", "ku",
    "la", "le", "li", "lo", "lu",
    "ma", "me", "mi", "mo", "mu",
    "na", "ne", "ni", "no", "nu",
    "pa", "pe", "pi", "po", "pu",
    "qa", "qe", "qi", "qo", "qu",
    "ra", "re", "ri", "ro", "ru",
    "sa", "se", "si", "so", "su",
    "ta", "te", "ti", "to", "tu",
    "va", "ve", "vi", "vo", "vu",
    "wa", "we", "wi", "wo", "wu",
    "xa", "xe", "xi", "xo", "xu",
    "ya", "ye", "yi", "yo", "yu",
    "za", "ze", "zi", "zo", "zu",
    "bai", "bau", "bei", "beu", "boi", "bou",
    "cai", "cau", "cei", "ceu", "coi", "cou",
    "dai", "dau", "dei", "deu", "doi", "dou",
    "fai", "fau", "fei", "feu", "foi", "fou",
    "gai", "gau", "gei", "geu", "goi", "gou",
    "hai", "hau", "hei", "heu", "hoi", "hou",
    "jai", "jau", "jei", "jeu", "joi", "jou",
    "kai", "kau", "kei", "keu", "koi", "kou",
    "lai", "lau", "lei", "leu", "loi", "lou",
    "mai", "mau", "mei", "meu", "moi", "mou",
    "nai", "nau", "nei", "neu", "noi", "nou",
    "pai", "pau", "pei", "peu", "poi", "pou",
    "qai", "qau", "qei", "qeu", "qoi", "qou",
    "rai", "rau", "rei", "reu", "roi", "rou",
    "sai", "sau", "sei", "seu", "soi", "sou",
    "tai", "tau", "tei", "teu", "toi", "tou",
    "vai", "vau", "vei", "veu", "voi", "vou",
    "wai", "wau", "wei", "weu", "woi", "wou",
    "xai", "xau", "xei", "xeu", "xoi", "xou",
    "yai", "yau", "yei", "yeu", "yoi", "you",
    "zai", "zau", "zei", "zeu", "zoi", "zou",
    "cha", "che", "chi", "cho", "chu",
    // NOTE(review): the "sha…" row below is repeated again further down —
    // the array length (295) still matches, but the duplicate doubles the
    // odds of those syllables; confirm whether that is intentional.
    "sha", "she", "shi", "sho", "shu",
    "tha", "the", "thi", "tho", "thu",
    "wha", "whe", "whi", "who", "whu",
    "pha", "phe", "phi", "pho", "phu",
    "zha", "zhe", "zhi", "zho", "zhu",
    "tra", "tre", "tri", "tro", "tru",
    "kha", "khe", "khi", "kho", "khu",
    "gha", "ghe", "ghi", "gho", "ghu",
    "sha", "she", "shi", "sho", "shu",
    "twa", "twe", "twi", "two", "twu",
    "qua", "que", "qui", "quo",
    "sla", "sle", "sli", "slo", "slu",
];
// Substring blocklist checked by generate_name(); any candidate name
// containing one of these is regenerated.
const OFFENSIVE_TERMS: [&str; 20] = [
    "shit", "fat", "whore", "nig",
    "jig", "cum", "fag", "twat",
    "jiz", "kum", "anal", "quere",
    "phuk", "fuk", "arab", "dik",
    "jew", "slut", "tit", "phag"
];
/// Short encouragement messages shown to the learner.
// Renamed from lowercase `affirmations`: consts use SCREAMING_SNAKE_CASE
// (the old name triggered the non_upper_case_globals warning). The const
// is private and only used by random_affirmation below.
const AFFIRMATIONS: [&str; 8] = [
    "You're doing great!",
    "Keep up the good work!",
    "Fantastic effort!",
    "You're on the right track!",
    "Excellent job!",
    "You're making progress!",
    "Keep it up!",
    "Great work!",
];
/// Return one affirmation chosen uniformly at random.
pub fn random_affirmation() -> String {
    let mut rng = rand::rng();
    let index = rng.random_range(0..AFFIRMATIONS.len());
    AFFIRMATIONS[index].to_string()
}
/// Build a pronounceable random name from 2-4 syllables, retrying until
/// the candidate is at most 8 characters and contains no blocklisted
/// substring. The first letter is upper-cased.
pub fn generate_name() -> String {
    let mut rng = rand::rng();
    loop {
        // Generate a name with 2 to 4 syllables.
        let syllable_count = rng.random_range(2..5);
        let mut candidate = String::new();
        for _ in 0..syllable_count {
            candidate.push_str(SYLLABLES[rng.random_range(0..SYLLABLES.len())]);
        }
        let offensive = OFFENSIVE_TERMS.iter().any(|&term| candidate.contains(term));
        if offensive || candidate.len() > 8 {
            continue; // discard and try again with a fresh candidate
        }
        return candidate[..1].to_ascii_uppercase() + &candidate[1..];
    }
}
/// Return a random lead-in phrase used when introducing a unit.
pub fn random_unit_intro() -> String {
    const UNIT_INTROS: [&str; 5] = [
        "It's time to learn about",
        "Let's dive into",
        "Get ready to explore",
        "Prepare to discover",
        "Let's embark on a journey to learn about",
    ];
    let mut rng = rand::rng();
    UNIT_INTROS[rng.random_range(0..UNIT_INTROS.len())].to_string()
}

4
src/utilities/mod.rs Normal file
View File

@ -0,0 +1,4 @@
pub(crate) mod generators;
pub(crate) mod files;
pub(crate) mod printers;
pub(crate) mod questions;

271
src/utilities/printers.rs Normal file
View File

@ -0,0 +1,271 @@
use crossterm::{execute, style::Stylize, terminal::{size, Clear, ClearType}};
use std::io::stdout;
use super::generators;
// Seconds print_boxed pauses on DelayExplanation screens before
// re-printing them as a normal box.
const DELAY_SECONDS: u64 = 1;
/// Visual category of a boxed message; selects the border color and the
/// post-print wait behavior in `print_boxed`.
#[derive(PartialEq)]
pub enum StatementType {
    Default,
    /// Shown with a timed pause first, then re-printed as `Default`.
    DelayExplanation,
    /// Printed and returned immediately (no wait) so a prompt can follow.
    Question,
    GeneralFeedback,
    CorrectFeedback,
    IncorrectFeedback,
}
/// Clear the console
pub fn clear_console() {
    // Clears the whole terminal buffer via crossterm; panics only if the
    // terminal backend is unavailable.
    execute!(stdout(), Clear(ClearType::All)).unwrap();
}
/// Pretty-print an instruction centered on the screen in an outlined box
pub fn print_boxed(instruction: &str, statement_type: StatementType) {
    let (cols, rows) = size().unwrap();
    let padding = 4;
    let max_width = 100;
    // Re-wrap the text so no line is wider than min(max_width, terminal
    // width). Multi-byte characters are counted as two columns (a rough
    // wide-glyph heuristic).
    let instruction = instruction
        .lines()
        .flat_map(|line| {
            let mut split_lines = vec![];
            let mut current_line = String::new();
            for word in line.split_whitespace() {
                let word_width = word.chars().map(|c| if c.len_utf8() > 1 { 2 } else { 1 }).sum::<usize>();
                if current_line.chars().map(|c| if c.len_utf8() > 1 { 2 } else { 1 }).sum::<usize>() + word_width + 1 > std::cmp::min(max_width, cols as usize) {
                    split_lines.push(current_line.trim_end().to_string());
                    current_line = String::new();
                }
                current_line.push_str(word);
                current_line.push(' ');
            }
            if !current_line.is_empty() {
                split_lines.push(current_line.trim_end().to_string());
            }
            split_lines
        })
        .collect::<Vec<_>>()
        .join("\n");
    // Box width: longest wrapped line plus padding, capped at max_width.
    let box_width = std::cmp::min(
        instruction.lines().map(|line| line.chars().map(|c| c.len_utf8()).sum::<usize>()).max().unwrap_or(0) + padding * 2,
        max_width,
    );
    let box_height = instruction.lines().count() + 2;
    let start_col = (cols.saturating_sub(box_width as u16)) / 2;
    let start_row = (rows.saturating_sub(box_height as u16)) / 2;
    // Border pieces, colored per statement type.
    // NOTE(review): many border glyph literals below are empty strings —
    // they look like box-drawing characters lost in an encoding pass
    // (compare the intact `│` in the default content arm); confirm the
    // intended glyphs.
    let top_border;
    let bottom_border;
    let empty_line;
    match statement_type {
        StatementType::Question => {
            top_border = format!("{}{}{}", "".dark_blue(), "".repeat(box_width).dark_blue(), "".dark_blue());
            bottom_border = format!("{}{}{}", "".dark_blue(), "".repeat(box_width).dark_blue(), "".dark_blue());
            empty_line = format!("{}{}{}", "".dark_blue(), " ".repeat(box_width).dark_blue(), "".dark_blue());
        }
        StatementType::GeneralFeedback => {
            top_border = format!("{}{}{}", "".dark_magenta(), "".repeat(box_width).dark_magenta(), "".dark_magenta());
            bottom_border = format!("{}{}{}", "".dark_magenta(), "".repeat(box_width).dark_magenta(), "".dark_magenta());
            empty_line = format!("{}{}{}", "".dark_magenta(), " ".repeat(box_width).dark_magenta(), "".dark_magenta());
        }
        StatementType::CorrectFeedback => {
            top_border = format!("{}{}{}", "".green(), "".repeat(box_width).green(), "".green());
            bottom_border = format!("{}{}{}", "".green(), "".repeat(box_width).green(), "".green());
            empty_line = format!("{}{}{}", "".green(), " ".repeat(box_width).green(), "".green());
        }
        StatementType::IncorrectFeedback => {
            top_border = format!("{}{}{}", "".red(), "".repeat(box_width).red(), "".red());
            bottom_border = format!("{}{}{}", "".red(), "".repeat(box_width).red(), "".red());
            empty_line = format!("{}{}{}", "".red(), " ".repeat(box_width).red(), "".red());
        }
        _ => {
            top_border = format!("{}", "".repeat(box_width));
            bottom_border = format!("{}", "".repeat(box_width));
            empty_line = format!("{}", " ".repeat(box_width));
        }
    }
    clear_console();
    // Render the box row by row: border, centered content lines, border.
    for i in 0..box_height {
        let line = match i {
            0 => &top_border,
            i if i == box_height - 1 => &bottom_border,
            i if i > 0 && i < box_height - 1 => {
                let line_content = instruction
                    .lines()
                    .nth(i - 1)
                    .unwrap_or("")
                    .trim();
                let line_width = line_content.chars().map(|c| if c.len_utf8() > 1 { 2 } else { 1 }).sum::<usize>();
                match statement_type {
                    StatementType::Question => {
                        &format!(
                            "{}{}{}{}{}",
                            "".dark_blue(),
                            " ".repeat((box_width.saturating_sub(line_width)) / 2),
                            line_content,
                            " ".repeat((box_width.saturating_sub(line_width) + 1) / 2),
                            "".dark_blue(),
                        )
                    }
                    StatementType::GeneralFeedback => {
                        &format!(
                            "{}{}{}{}{}",
                            "".dark_magenta(),
                            " ".repeat((box_width.saturating_sub(line_width)) / 2),
                            line_content,
                            " ".repeat((box_width.saturating_sub(line_width) + 1) / 2),
                            "".dark_magenta(),
                        )
                    }
                    StatementType::CorrectFeedback => {
                        &format!(
                            "{}{}{}{}{}",
                            "".green(),
                            " ".repeat((box_width.saturating_sub(line_width)) / 2),
                            line_content,
                            " ".repeat((box_width.saturating_sub(line_width) + 1) / 2),
                            "".green(),
                        )
                    }
                    StatementType::IncorrectFeedback => {
                        &format!(
                            "{}{}{}{}{}",
                            "".red(),
                            " ".repeat((box_width.saturating_sub(line_width)) / 2),
                            line_content,
                            " ".repeat((box_width.saturating_sub(line_width) + 1) / 2),
                            "".red(),
                        )
                    }
                    _ => {
                        &format!(
                            "│{}{}{}│",
                            " ".repeat((box_width.saturating_sub(line_width)) / 2),
                            line_content,
                            " ".repeat((box_width.saturating_sub(line_width) + 1) / 2),
                        )
                    }
                }
            }
            _ => &empty_line,
        };
        // Absolute cursor positioning (row;col) via ANSI escape.
        println!("\x1B[{};{}H{}", start_row + i as u16, start_col + 1, line);
    }
    // Post-print behavior depends on the statement type: questions return
    // immediately, delayed explanations pause then re-print, everything
    // else waits for a keypress.
    match statement_type {
        StatementType::Question => {
            return;
        }
        StatementType::DelayExplanation => {
            wait_for_input_delay(DELAY_SECONDS);
            print_boxed(&instruction, StatementType::Default);
        }
        _ => {
            wait_for_input();
        }
    }
}
/// Draw a horizontal rule of `character` spanning the full terminal width.
///
/// `padding.0` / `padding.1` control whether a blank line is printed
/// before / after the rule, respectively.
pub fn print_rule(character: char, padding: (bool, bool)) {
    let (cols, _) = size().unwrap();
    let leading = if padding.0 { "\n" } else { "" };
    let trailing = if padding.1 { "\n" } else { "" };
    let rule = character.to_string().repeat(cols as usize);
    println!("{}{}{}", leading, rule, trailing);
}
/// Horizontally center the desired text on the screen.
///
/// Emits a cursor-forward escape (`ESC [ n C`) so the text starts at the
/// column that centers it in the current terminal width.
pub fn print_centered(text: &str) {
    let (cols, _) = size().unwrap();
    // Fix: use the character count rather than `len()`. `len()` is the UTF-8
    // byte length, which over-estimates the width of any non-ASCII text
    // (e.g. the box-drawing glyphs used elsewhere in this file) and pushed
    // such text left of center.
    let text_width = text.chars().count() as u16;
    let start_col = (cols.saturating_sub(text_width)) / 2;
    println!(
        "\x1B[{}C{}",
        start_col,
        text
    );
}
/// Show a centered "press enter" prompt and block until the next terminal
/// event (e.g. a key press) arrives.
pub fn wait_for_input() {
    let prompt = "Press enter to continue";
    let (cols, _) = size().unwrap();
    let start_col = cols.saturating_sub(prompt.len() as u16) / 2 + 1;
    // Cursor-forward escape moves the prompt to the centering column.
    println!("\x1B[{}C{}", start_col, prompt.italic().dark_blue());
    // Block on the next terminal event before returning.
    crossterm::event::read().unwrap();
}
/// Print `title` bold and horizontally centered, then wait for the user to
/// press a key before continuing.
pub fn print_title(title: &str) {
    let (cols, _) = size().unwrap();
    let start_col = cols.saturating_sub(title.len() as u16) / 2 + 1;
    println!("\x1B[{}C{}", start_col, title.bold());
    wait_for_input();
}
/// Show a centered mindfulness prompt, then sleep for `seconds` seconds
/// before returning (advances automatically; no user input is read).
pub fn wait_for_input_delay(seconds: u64) {
    let prompt = format!("Remember to be mindful. Advance in {} seconds.", seconds);
    let (cols, _) = size().unwrap();
    let text_width = prompt.len() as u16;
    let start_col = (cols.saturating_sub(text_width)) / 2 + 1;
    println!(
        "\x1B[{}C{}",
        start_col,
        prompt.italic().dark_blue()
    );
    // Bug fix: this previously slept for the global DELAY_SECONDS constant,
    // silently ignoring the `seconds` argument the prompt advertises.
    std::thread::sleep(std::time::Duration::from_secs(seconds));
}
/// Announce, centered and styled, that the answers will appear in `seconds`
/// seconds. Only prints the notice — the caller performs the actual delay.
pub fn inform_delay(seconds: u64) {
    let prompt = format!("Remember to be mindful. Answers appear in {} seconds.", seconds);
    let (cols, _) = size().unwrap();
    let start_col = cols.saturating_sub(prompt.len() as u16) / 2 + 1;
    println!("\x1B[{}C{}", start_col, prompt.italic().dark_blue());
}
/// Clear the screen and display a unit's title in a feedback-styled box.
pub fn unit_screen(title: &str) {
    clear_console();
    print_boxed(title, StatementType::GeneralFeedback);
}
/// Clear the screen and render `text` inside a default-styled box.
pub fn print_screen(text: &str) {
    clear_console();
    print_boxed(text, StatementType::Default);
}

View File

@ -0,0 +1,97 @@
use inquire::{MultiSelect, Select, Text};
use crossterm::terminal;
use super::printers::{clear_console, inform_delay, print_boxed, StatementType};
const DELAY_SECONDS: u64 = 1;
/// Display a multiple-choice question, hold it on screen for a short
/// mindfulness delay, then collect the user's answer via [`ask_mcq`].
pub fn ask_mcq_delayed(question: &str, options: &[&str]) -> Option<String> {
    // Show the question alone first so the reader pauses before answering.
    print_boxed(question, StatementType::Question);
    inform_delay(DELAY_SECONDS);
    std::thread::sleep(std::time::Duration::from_secs(DELAY_SECONDS));
    ask_mcq(question, options)
}
/// Print a quiz question in an attractive format and get the user's choice.
///
/// Returns `Some(choice)` with the selected option's text, or `None` when
/// the prompt is cancelled (e.g. Esc / Ctrl-C).
pub fn ask_mcq(question: &str, options: &[&str]) -> Option<String> {
    clear_console();
    // Rows remaining below the option list, used to push the selector toward
    // the vertical middle. Fix: saturating_sub prevents an integer underflow
    // (debug-build panic) on very short terminals or when size() fails and
    // the height falls back to 0.
    let rows = terminal::size()
        .map(|(_, height)| height as usize)
        .unwrap_or(0)
        .saturating_sub(options.len() + 2);
    print_boxed(question, StatementType::Question);
    for _ in 0..(rows / 2) {
        println!();
    }
    let ans = Select::new("Your response:", options.to_vec())
        .prompt();
    match ans {
        Ok(choice) => Some(String::from(choice)),
        Err(_) => None,
    }
}
/// Print a quiz question in an attractive format and get the user's choices.
///
/// Returns `Some(selections)` with every option the user toggled on, or
/// `None` when the prompt is cancelled (e.g. Esc / Ctrl-C).
pub fn ask_multi_select(question: &str, options: &[&str]) -> Option<Vec<String>> {
    clear_console();
    // Rows remaining below the option list, used for vertical centering.
    // Fix: saturating_sub prevents an integer underflow (debug-build panic)
    // on very short terminals or when size() fails and height falls back to 0.
    let rows = terminal::size()
        .map(|(_, height)| height as usize)
        .unwrap_or(0)
        .saturating_sub(options.len() + 2);
    print_boxed(question, StatementType::Question);
    for _ in 0..(rows / 2) {
        println!();
    }
    let ans = MultiSelect::new("Select all applicable answers with SPACEBAR, then press RETURN to submit:", options.to_vec())
        .prompt();
    match ans {
        Ok(choices) => Some(choices.into_iter().map(String::from).collect()),
        Err(_) => None,
    }
}
/// Print a question in an attractive format and get the user's text input.
///
/// Returns `Some(input)` with whatever the user typed (possibly empty), or
/// `None` when the prompt is cancelled (e.g. Esc / Ctrl-C).
pub fn ask_plaintext(question: &str) -> Option<String> {
    clear_console();
    // Rows remaining below the input line, used for vertical centering.
    // Fix: saturating_sub prevents an integer underflow (debug-build panic)
    // on terminals shorter than 3 rows or when size() falls back to 0.
    let rows = terminal::size()
        .map(|(_, height)| height as usize)
        .unwrap_or(0)
        .saturating_sub(3);
    print_boxed(question, StatementType::Question);
    for _ in 0..(rows / 2) {
        println!();
    }
    let ans = Text::new("Please provide your input:")
        .prompt();
    match ans {
        Ok(choice) => Some(choice),
        Err(_) => None,
    }
}
pub fn ask_name() -> String {
let name = ask_plaintext("What is your name?").unwrap_or_else(|| {
let default_name = super::generators::generate_name();
print_boxed(&format!("Failed to get name. I'll give you a random one: {}.", default_name), StatementType::IncorrectFeedback);
default_name
});
if name .is_empty() {
let default_name = super::generators::generate_name();
print_boxed(&format!("You didn't provide a name. I'll give you a random one: {}.", default_name), StatementType::IncorrectFeedback);
return default_name;
}
print_boxed(&format!("Welcome, {}! Thanks for joining me.", name), StatementType::Default);
name
}