re-implemented cleanup
diff --git a/src/photosort.py b/src/photosort.py
index a4b319ebf12044413079359fed49b153af892968..a25b70b442885993bf1759f0dfb6ae795a416287 100755
--- a/src/photosort.py
+++ b/src/photosort.py
@@ -12,42 +12,57 @@ parser.add_argument('SOURCE_DIR', type=str, help='source directory')
 parser.add_argument('DEST_DIR', type=str, help='target directory')
 parser.add_argument('-c', '--cleanup', action='store_true', dest='cleanup',
                     default=False, help='clean-up source dir')
-parser.add_argument('-l', '--log-level', type=str, default='INFO', dest='log_lvl',
-                    choices=['DEBUG', 'INFO', 'WARNING'], help='select log level')
+parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
+                    default=False, help='enable verbose output')
+parser.add_argument('-q', '--quiet', action='store_true', dest='quiet',
+                    default=False, help='suppress non-error output')
 
 args = parser.parse_args()
 
+if args.verbose:
+    log_level = logging.INFO
+elif args.quiet:
+    log_level = logging.ERROR
+else:
+    log_level = logging.WARNING
+
 logging.basicConfig(format='[%(asctime)s] %(levelname)s: %(message)s',
-                    level=logging.getLevelName(args.log_lvl),
-                    datefmt='%m/%d/%Y %H:%M:%S')
+                    level=log_level, datefmt='%m/%d/%Y %H:%M:%S')
 
 for src_file_name, src_file_path in misc.walk_media_files(args.SOURCE_DIR):
     logging.info('Processing %s...', src_file_name)
 
-    exif_time = misc.extract_timestamp(src_file_path, use_exif=True)
-
-    dst_dir = os.path.join(args.DEST_DIR,
-                           datetime.datetime.fromtimestamp(exif_time).strftime("%Y/%m"))
-    dst_file_path = os.path.join(dst_dir, src_file_name)
-
-    if not os.path.exists(dst_file_path):
-        alt_dst_dir = misc.find_file(args.DEST_DIR,
-                                     src_file_name,
-                                     os.path.getsize(src_file_path),
-                                     exclude_dir=args.SOURCE_DIR)
-        if alt_dst_dir:
-            dst_dir = alt_dst_dir
-            dst_file_path = os.path.join(dst_dir, src_file_name)
-
-    if not os.path.exists(dst_file_path):
-        if not os.path.exists(dst_dir):
-            os.makedirs(dst_dir)
-        misc.import_file(src_file_path, dst_file_path)
-    else:
-        src_time = misc.extract_timestamp(src_file_path)
-        dst_time = misc.extract_timestamp(dst_file_path)
-        if src_time > dst_time:
+    try:
+        meta_time = misc.extract_timestamp(src_file_path, use_meta=True)
+
+        dst_dir = os.path.join(args.DEST_DIR,
+                               datetime.datetime.fromtimestamp(meta_time).strftime("%Y/%m"))
+        dst_file_path = os.path.join(dst_dir, src_file_name)
+
+        if not os.path.exists(dst_file_path):
+            alt_dst_dir = misc.find_file(args.DEST_DIR,
+                                         src_file_name,
+                                         os.path.getsize(src_file_path),
+                                         exclude_dir=args.SOURCE_DIR)
+            if alt_dst_dir:
+                dst_dir = alt_dst_dir
+                dst_file_path = os.path.join(dst_dir, src_file_name)
+
+        if not os.path.exists(dst_file_path):
+            if not os.path.exists(dst_dir):
+                os.makedirs(dst_dir)
             misc.import_file(src_file_path, dst_file_path)
+        else:
+            src_time = misc.extract_timestamp(src_file_path)
+            dst_time = misc.extract_timestamp(dst_file_path)
+            if src_time > dst_time:
+                misc.import_file(src_file_path, dst_file_path)
+
+        if args.cleanup:
+            os.remove(src_file_path)
+
+    except Exception as e:
+        logging.error('Error processing %s: %s', src_file_path, str(e))
 
 if args.cleanup:
-    misc.delete_dir_contents(args.SOURCE_DIR)
+    misc.cleanup_dir(args.SOURCE_DIR)
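
Note: misc.cleanup_dir is defined in misc.py, which this change does not touch. Since source files are now removed one by one inside the processing loop (when --cleanup is given), the final call presumably only has to sweep up whatever the loop leaves behind, such as empty sub-directories. A minimal sketch of what such a helper could look like under that assumption; the real implementation may differ:

    import os

    def cleanup_dir(root_dir):
        # Hypothetical sketch only: prune empty sub-directories beneath
        # root_dir after the per-file cleanup in the main loop has run.
        # Walk bottom-up so children are visited before their parents.
        for dir_path, _, _ in os.walk(root_dir, topdown=False):
            if dir_path == root_dir:
                continue  # keep the source directory itself
            if not os.listdir(dir_path):  # empty at this point?
                os.rmdir(dir_path)        # rmdir only removes empty dirs

Walking bottom-up matters here: a directory can only be removed once its own (now empty) children have been removed first.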