Another timelapse script: Using OpenCV to blend image transitions and overlay timestamp

In the past, I've used the Python Imaging Library to blend images and place text overlays on frames. I recently wrote a version that does this in a single script, using OpenCV for the image manipulation. Maybe it's useful to someone...

Setting up the Raspberry Pi

The bash script below creates a time-stamped file name for each image and saves it in the "timelapse" folder in the home directory (/home/pi/timelapse). The file names follow the convention "YYYY-MM-DD_hhmm.jpg".

#!/bin/bash
 
DATE=$(date +"%Y-%m-%d_%H%M")
 
raspistill -hf -vf -o /home/pi/timelapse/$DATE.jpg

To have it run every minute between 06:00 and 19:59 (the 6-19 hour field), I added this to my crontab:
*/1 6-19 * * * /home/pi/bin/make_raspistill.sh

The Python script to create the .mp4 movie

Here's the help output when running the script with the "-h" argument:

python make_movie_cv2.py -h
usage: make_movie_cv2.py [-h] [-o OUTPUT] [-v] [-t] [-b BLEND] [--fps FPS]
                         [--timestamp_format TIMESTAMP_FORMAT] [--ts]
                         imgpath
 
positional arguments:
  imgpath               Path to the image files
 
optional arguments:
  -h, --help            show this help message and exit
  -o OUTPUT, --output OUTPUT
                        Output file name (Default: output.mp4)
  -v, --view            View output
  -t, --test            Test run only. Do not create video.
  -b BLEND, --blend BLEND
                        Create (1 to 5) intermediate images between frames.
                        Default: 0 (off)
  --fps FPS             Frames per second (fps) for movie
  --timestamp_format TIMESTAMP_FORMAT
                        Time stamp format; default: "Y-m-d_HM"
  --ts                  Show timestamp
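
For example (the folder and option values here are just an illustration), the following call would build a 24 fps movie from the images in /home/pi/timelapse, with the timestamp overlay enabled and two blended frames inserted between each pair of images:

python make_movie_cv2.py /home/pi/timelapse -o timelapse.mp4 --ts -b 2 --fps 24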

The script

The script is written for Python 3 and needs OpenCV (cv2) installed. If the location of the text overlay needs to be changed, it has to be done directly in the code, in the "format_timestamp" function (see the short sketch after the listing).

#!/usr/bin/env python3

# Program to create a timelapse movie from an
# image sequence shot by my Raspberry Pi.
# Depends on OpenCV (cv2)
#
# My RasPi usually takes one image
# every minute (scheduled with "crontab"),
# and saves it as a time-stamped JPG.
#
# This script loads the JPGs contained in one
# directory, and creates an output movie (mp4).
# It gives you the option of blending images in
# variable steps from img_t to img_t+1.
#
# Calling this script with the "-h" argument will
# spit out help

import cv2
import numpy as np
import os
import argparse
from datetime import datetime

PAUSE_T = 100  # pause (in ms) between frames in the live preview


def parse_timestamp(ts, dateformat='%Y-%m-%d_%H%M'):
    """
    Parses a timestamp string and returns newly formatted
    strings used in the text overlay in a dictionary.
    """
    # e.g. 2018-02-09_1750
    # Create datetime object with the dateformat specified
    # in the dateformat argument:
    dt = datetime.strptime(ts, dateformat)
    return {'day': dt.strftime('%A'),
            'date': dt.strftime('%m-%d-%Y'),
            'time': dt.strftime('%H:%M')}


def overlay_text(txt, img, ll, fscale=1):
    """Use OpenCV to overlay text"""
    font = cv2.FONT_HERSHEY_DUPLEX
    bottomLeftCornerOfText = ll
    fontScale = fscale
    fontColor = (0, 0, 0)  # (0,140,255) # BGR
    thickness = 3

    cv2.putText(img, txt,
                bottomLeftCornerOfText,
                font,
                fontScale,
                fontColor,
                thickness)


def format_timestamp(img, info, imgsize):
    """
    Change the text positions here to format time stamp to your liking.
    info is a dictionary containing the info to print here
    """
    overlay_text(info['mincounter'],
                 img,
                 ll=(40, imgsize[1] - 40),
                 fscale=3)

    overlay_text(info['day'],
                 img,
                 ll=(40, imgsize[1] - 300),
                 fscale=3)

    overlay_text(info['date'],
                 img,
                 ll=(50, imgsize[1] - 240),
                 fscale=1.5)

    overlay_text(info['time'],
                 img,
                 ll=(40, imgsize[1] - 150),
                 fscale=3)


def main():
    P = argparse.ArgumentParser()
    P.add_argument('imgpath', nargs=1,
                   help='Path to the image files')
    P.add_argument('-o', '--output',
                   help='Output file name (Default: output.mp4)',
                   default='output.mp4')
    P.add_argument('-v', '--view',
                   help='View output',
                   action='store_true')
    P.add_argument('-t', '--test',
                   help='Test run only. Do not create video.',
                   action='store_true')
    P.add_argument('-b', '--blend',
                   help='Create (1 to 5) intermediate images between frames. Default: 0 (off)',
                   default=0,
                   type=int)
    P.add_argument('--fps',
                   help='Frames per second (fps) for movie',
                   type=int,
                   default=12)
    P.add_argument('--timestamp_format',
                   help='Time stamp format; default: "Y-m-d_HM"',
                   default='%Y-%m-%d_%H%M')
    P.add_argument('--ts', help='Show timestamp', action='store_true')
    ARGS = P.parse_args()

    imgpath = ARGS.imgpath[0]
    imgs = [i for i in os.listdir(imgpath) if i.endswith('.jpg')]
    imgs.sort()  # sort in place (list.sort() returns None)
    video_out = ARGS.output
    if not video_out.endswith('.mp4'):
        video_out += '.mp4'
    frame = cv2.imread(os.path.join(imgpath, imgs[0]))
    height, width, channels = frame.shape
    if ARGS.test:
        print('Testing only. No actual video file will be written.')
    if ARGS.blend:
        print('Blending ON. Creating {} intermediate images between each frame pair'.format(ARGS.blend))
    print('Information:')
    max_imgs = len(imgs)
    print('{} images found in target folder'.format(max_imgs))
    print('First image height: {}, width: {}'.format(height, width))
    print('All subsequent images have to have the same dimensions!')
    size = (width, height)
    if not ARGS.test:
        fourcc = cv2.VideoWriter_fourcc(*'mp4v')
        videowriter = cv2.VideoWriter()
        # VideoWriter.open() returns False instead of raising on failure:
        if videowriter.open(video_out, fourcc, ARGS.fps, size):
            print('Opened video writer')
        else:
            print('Failed to open video writer')

    # go through all the images in the folder:
    for n, i in enumerate(imgs):
        print('Current image: {}'.format(i))
        curr_img = cv2.imread(os.path.join(imgpath, i))
        ds = os.path.splitext(i)[0]
        info = parse_timestamp(ds, dateformat=ARGS.timestamp_format)
        info['mincounter'] = '{:>3d} {:<s}'.format(n + 1, 'min.')
        if ARGS.ts:
            format_timestamp(curr_img, info, (width, height))

        if ARGS.view:
            img_resized = cv2.resize(curr_img,
                                     None,
                                     fx=0.5,
                                     fy=0.5,
                                     interpolation=cv2.INTER_CUBIC)
            cv2.imshow('image', img_resized)
            cv2.waitKey(PAUSE_T)
        if not ARGS.test:
            videowriter.write(curr_img)
        # if no blending, we are done for this loop, otherwise:
        if ARGS.blend and n < max_imgs - 1:
            # create intermediate images
            print('Creating {} intermediate blends to image {}:'.format(ARGS.blend,
                                                                        imgs[n + 1]))
            # image one is the current one, also read the next one:
            next_img = cv2.imread(os.path.join(imgpath, imgs[n + 1]))
            # blending steps:
            delta_alpha = 1.0 / (ARGS.blend + 1)
            alpha = 0.0  # init.
            for counter in range(0, ARGS.blend):
                alpha = alpha + delta_alpha
                print('\tBlend alpha: {}'.format(alpha))
                blendimg = np.zeros((height, width, 3), np.uint8)
                cv2.addWeighted(curr_img, (1 - alpha), next_img, alpha, 0, blendimg)

                if ARGS.ts:
                    format_timestamp(blendimg, info, (width, height))

                if ARGS.view:
                    img_resized = cv2.resize(blendimg,
                                             None,
                                             fx=0.5,
                                             fy=0.5,
                                             interpolation=cv2.INTER_CUBIC)
                    cv2.imshow('image', img_resized)
                    cv2.waitKey(PAUSE_T)
                # write blended image to file:
                if not ARGS.test:
                    videowriter.write(blendimg)

    if not ARGS.test:
        videowriter.release()
    cv2.destroyAllWindows()
    print('Done.')


if __name__ == "__main__":
    main()
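
As a minimal sketch of such a change: OpenCV's putText() expects the bottom-left corner of the text string in pixel coordinates, with (0, 0) at the top-left of the image. To move the minute counter up into the top-left corner, for example, the first overlay_text() call in "format_timestamp" could be adjusted along these lines (the coordinates here are just an illustration):

overlay_text(info['mincounter'],
             img,
             ll=(40, 120),  # 40 px from the left edge, text baseline 120 px below the top
             fscale=3)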

Here's a link to the code as a Bitbucket snippet.


