Given a projectile speed s in m/s, the angle of the initial velocity beta in degrees, and the height of the origin y, calculate the time until the projectile arrives at the ground (y=0).
Further note:
- Assume there is no drag.
- 0 <= s, y <= 1000, 0 <= beta <= 90.
- The time has to be accurate to three decimals.
- Use 9.81 m/s^2 as the gravitational acceleration.
import math
def calc_time_of_impact(speed, angle, origin_y):
    """Return the time in seconds until a drag-free projectile hits the ground.

    The projectile is launched from height ``origin_y`` (m) with initial
    speed ``speed`` (m/s) at ``angle`` degrees above the horizontal; the
    ground is at y = 0.

    Solves the vertical-motion equation
        y(t) = origin_y + v_y * t - g/2 * t**2 = 0
    in closed form for the positive root.  This is exact, unlike the
    previous brute-force time-stepping loop, which was limited to the
    1e-4 s step resolution and needed millions of iterations for large
    inputs (it also computed an unused horizontal position).

    Assumes 0 <= speed, origin_y and 0 <= angle <= 90 per the problem
    statement, so the discriminant is non-negative.
    """
    g = 9.81  # gravitational acceleration, m/s^2 (per problem statement)
    v_y = speed * math.sin(math.radians(angle))  # vertical launch speed
    # Positive root of the quadratic: t = (v_y + sqrt(v_y^2 + 2*g*y0)) / g
    return (v_y + math.sqrt(v_y * v_y + 2.0 * g * origin_y)) / g
import unittest
import solution
class TestNonRandomInputs(unittest.TestCase):
    """Fixed-input checks for solution.calc_time_of_impact."""

    def _check(self, speed, angle, origin_y, expected, msg):
        # The specification requires three-decimal accuracy, so compare
        # the rounded result against the expected value.
        actual = round(solution.calc_time_of_impact(speed, angle, origin_y), 3)
        self.assertEqual(actual, expected, msg)

    def test_minimum(self):
        self._check(0, 0, 0, 0.000, "Minimum values should yield 0.000.")

    def test_maximum(self):
        self._check(1000, 90, 1000, 204.869,
                    "Maximum values should yield 204.869.")

    def test_middleground(self):
        self._check(279, 52, 776, 48.111,
                    "Values 279, 52, 776 should yield 48.111")
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()